-rw-r--r--  .gitignore | 6
-rw-r--r--  README.md | 9
-rw-r--r--  build.sbt | 443
-rwxr-xr-x  build.xml | 8
-rw-r--r--  compare-build-dirs-ignore-patterns | 8
-rwxr-xr-x  compare-build-dirs.sh | 5
-rw-r--r--  project/ScalaTool.scala | 44
-rw-r--r--  project/build.properties | 1
-rw-r--r--  project/plugins.sbt | 1
-rw-r--r--  src/compiler/scala/reflect/quasiquotes/Reifiers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/PhaseAssembly.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Constructors.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Delambdafy.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LazyVals.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SampleTransform.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/CompilerControl.scala | 2
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Global.scala | 2
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Lexer.scala | 2
-rw-r--r--  src/library/scala/Mutable.scala | 2
-rw-r--r--  src/library/scala/Predef.scala | 4
-rw-r--r--  src/library/scala/collection/SeqLike.scala | 2
-rw-r--r--  src/library/scala/collection/concurrent/Map.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/AVLTree.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/HashTable.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/UnrolledBuffer.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/WrappedArray.scala | 2
-rw-r--r--  src/library/scala/concurrent/JavaConversions.scala | 2
-rw-r--r--  src/library/scala/math/BigDecimal.scala | 2
-rw-r--r--  src/library/scala/sys/process/BasicIO.scala | 2
-rw-r--r--  src/library/scala/sys/process/ProcessLogger.scala | 2
-rw-r--r--  src/library/scala/util/control/Exception.scala | 2
-rw-r--r--  src/library/scala/util/hashing/MurmurHash3.scala | 2
-rw-r--r--  src/library/scala/util/matching/Regex.scala | 2
-rw-r--r--  src/partest-extras/scala/tools/partest/ASMConverters.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/ReificationSupport.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/TreeGen.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/FindMembers.scala | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/Index.scala | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala | 2
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/html/page/DeprecatedIndex.scala | 58
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala | 7
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala | 8
-rw-r--r--  test/files/jvm/inner.scala | 2
-rw-r--r--  test/files/jvm/javaReflection/Test.scala | 4
-rw-r--r--  test/files/pos/t7815.scala | 2
-rw-r--r--  test/files/run/classfile-format-51.scala | 2
-rw-r--r--  test/files/run/classfile-format-52.scala | 2
-rw-r--r--  test/files/run/t7741a/Test.scala | 2
-rw-r--r--  test/files/run/valueClassSelfType.scala | 52
-rw-r--r--  test/junit/scala/collection/mutable/VectorTest.scala | 1
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala | 2
-rw-r--r--  test/pending/jvm/javasigs.scala | 2
-rw-r--r--  test/scaladoc/resources/SI-4476.scala | 9
-rw-r--r--  test/scaladoc/resources/Trac4420.scala | 2
-rw-r--r--  test/scaladoc/scalacheck/DeprecatedIndexTest.scala | 50
-rw-r--r--  test/scaladoc/scalacheck/IndexTest.scala | 8
-rwxr-xr-x  tools/scaladoc-compare | 2
75 files changed, 786 insertions, 80 deletions
diff --git a/.gitignore b/.gitignore
index 20d700dd12..d6571a377f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -48,3 +48,9 @@
# Standard symbolic link to build/quick/bin
/qbin
+
+# Sbt's target directories
+/target/
+/project/target/
+/project/project/target
+/build-sbt/
diff --git a/README.md b/README.md
index 830dfa8d6c..1651333188 100644
--- a/README.md
+++ b/README.md
@@ -138,6 +138,15 @@ The Scala build system is based on Apache Ant. Most required pre-compiled
libraries are part of the repository (in 'lib/'). The following however is
assumed to be installed on the build machine:
+## Building with Sbt (EXPERIMENTAL)
+
+The experimental sbt-based build definition has arrived! Run `sbt package`
+to build the compiler. You can run `sbt test` to run unit (JUnit) tests.
+Use `sbt test/it:test` to run integration (partest) tests.
+
+We would like to migrate to the sbt build as quickly as possible. If you would
+like to help, please contact the scala-internals@ mailing list to discuss your
+ideas and coordinate your efforts with others.
### Tips and tricks
diff --git a/build.sbt b/build.sbt
new file mode 100644
index 0000000000..0df2e6a800
--- /dev/null
+++ b/build.sbt
@@ -0,0 +1,443 @@
+/*
+ * The new, sbt-based build definition for Scala.
+ *
+ * What you see below is very much a work in progress. Basics like compiling and packaging jars
+ * (into the right location) work. Everything else is missing:
+ * building docs, placing shell scripts in the right locations (so you can run the compiler easily),
+ * running partest tests, compiling and running JUnit tests, and many, many other things.
+ *
+ * You'll notice that this build definition is much more complicated than your typical sbt build.
+ * The main reason is that we are not benefiting from sbt's conventions when it comes to project
+ * layout. For that reason we have to configure a lot more explicitly. I've tried to explain the
+ * less obvious settings in comments.
+ *
+ * This nicely leads me to explaining the goals and non-goals of this build definition. Goals are:
+ *
+ * - to be easy to tweak in case a bug or small inconsistency is found
+ * - to mimic Ant's behavior as closely as possible
+ * - to be super explicit about any departure from standard sbt settings
+ * - to achieve functional parity with the Ant build as quickly as possible
+ * - to be readable and not necessarily succinct
+ * - to provide the nicest development experience for people hacking on Scala
+ *
+ * Non-goals are:
+ *
+ * - to have the shortest sbt build definition possible; we'll beat the Ant definition
+ * easily and that alone will thrill us
+ * - to remove irregularities from our build process right away
+ * - to modularize the Scala compiler or library further
+ *
+ * It boils down to simple rules:
+ *
+ * - project layout is set in stone for now
+ * - if you need to work on convincing sbt to follow a non-standard layout, then
+ * explain everything you did in comments
+ * - constantly check where the Ant build produces class files and artifacts, what other
+ * kinds of files it generates, and port all of that here
+ *
+ * Note on bootstrapping:
+ *
+ * Let's start with a reminder of what bootstrapping means in our context. It's an answer
+ * to this question: which version of Scala are we using to compile Scala? The fact that
+ * the question sounds circular suggests trickiness. Indeed, bootstrapping the Scala
+ * compiler is a tricky process.
+ *
+ * The Ant build used to have an involved system for bootstrapping Scala, consisting of
+ * three layers: starr, locker and quick. The sbt build for Scala ditches layering
+ * and strives to be as standard an sbt project as possible. This means that we simply
+ * build Scala with the latest stable release of Scala.
+ * See this discussion for more details behind this decision:
+ * https://groups.google.com/d/topic/scala-internals/gp5JsM1E0Fo/discussion
+ */
+
+val bootstrapScalaVersion = "2.11.5"
+
+def withoutScalaLang(moduleId: ModuleID): ModuleID = moduleId exclude("org.scala-lang", "*")
+
+// exclusion of the scala-library transitive dependency avoids eviction warnings during `update`.
+val scalaParserCombinatorsDep = withoutScalaLang("org.scala-lang.modules" %% "scala-parser-combinators" % versionNumber("scala-parser-combinators"))
+val scalaXmlDep = withoutScalaLang("org.scala-lang.modules" %% "scala-xml" % versionNumber("scala-xml"))
+val partestDep = withoutScalaLang("org.scala-lang.modules" %% "scala-partest" % versionNumber("partest"))
+val partestInterfaceDep = withoutScalaLang("org.scala-lang.modules" %% "scala-partest-interface" % "0.5.0")
+val junitDep = "junit" % "junit" % "4.11"
+val junitInterfaceDep = "com.novocode" % "junit-interface" % "0.11" % "test"
+val jlineDep = "jline" % "jline" % versionProps("jline.version")
+val antDep = "org.apache.ant" % "ant" % "1.9.4"
+val scalacheckDep = withoutScalaLang("org.scalacheck" %% "scalacheck" % "1.11.4")
+
+lazy val commonSettings = clearSourceAndResourceDirectories ++ Seq[Setting[_]](
+ organization := "org.scala-lang",
+ version := "2.11.6-SNAPSHOT",
+ scalaVersion := bootstrapScalaVersion,
+ // we don't cross build Scala itself
+ crossPaths := false,
+ // do not add Scala library jar as a dependency automatically
+ autoScalaLibrary := false,
+  // we also do not add the Scala instance automatically because it introduces
+  // a circular dependency, see: https://github.com/sbt/sbt/issues/1872
+ managedScalaInstance := false,
+  // this is a way to work around the issue described in https://github.com/sbt/sbt/issues/1872;
+  // check it out for more details
+ scalaInstance := ScalaInstance(scalaVersion.value, appConfiguration.value.provider.scalaProvider.launcher getScala scalaVersion.value),
+ // we always assume that Java classes are standalone and do not have any dependency
+ // on Scala classes
+ compileOrder := CompileOrder.JavaThenScala,
+ javacOptions in Compile ++= Seq("-g", "-source", "1.5", "-target", "1.6"),
+  // we don't want any unmanaged jars; as a reminder: an unmanaged jar is a jar stored
+  // directly on the file system and not resolved through Ivy;
+  // the Ant build stores unmanaged jars in the `lib/` directory
+ unmanagedJars in Compile := Seq.empty,
+ sourceDirectory in Compile := baseDirectory.value,
+ unmanagedSourceDirectories in Compile := List(baseDirectory.value),
+ scalaSource in Compile := (sourceDirectory in Compile).value,
+ javaSource in Compile := (sourceDirectory in Compile).value,
+ // resources are stored along source files in our current layout
+ resourceDirectory in Compile := (sourceDirectory in Compile).value,
+  // each subproject has to ask specifically for the files it wants to include
+ includeFilter in unmanagedResources in Compile := NothingFilter,
+ target := (baseDirectory in ThisBuild).value / "target" / thisProject.value.id,
+ target in Compile in doc := buildDirectory.value / "scaladoc" / thisProject.value.id,
+ classDirectory in Compile := buildDirectory.value / "quick/classes" / thisProject.value.id,
+  // given that classDirectory is overridden to be _outside_ of the target directory, we have
+  // to make sure it's being cleaned properly
+ cleanFiles += (classDirectory in Compile).value,
+ fork in run := true
+)
+
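+// An illustrative note on where things land on disk (a hypothetical worked example, not a setting):
+// with the overrides above, and with buildDirectory pointing at ./build-sbt (see the bottom of this
+// file), a subproject such as `library` compiles its classes to build-sbt/quick/classes/library,
+// writes scaladoc to build-sbt/scaladoc/library, and keeps sbt's own bookkeeping under ./target/library.
+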
+// Disable various tasks that are not needed for projects that are used
+// only for compiling code and not publishing it as a standalone artifact.
+// We disable those tasks by overriding them and returning bogus files when
+// needed. This is a bit sketchy but I haven't found any better way.
+val disableDocsAndPublishingTasks = Seq[Setting[_]](
+ doc := file("!!! NO DOCS !!!"),
+ publishLocal := {},
+ publish := {},
+ packageBin in Compile := file("!!! NO PACKAGING !!!")
+)
+
+lazy val setJarLocation: Setting[_] =
+ artifactPath in packageBin in Compile := {
+ // two lines below are copied over from sbt's sources:
+ // https://github.com/sbt/sbt/blob/0.13/main/src/main/scala/sbt/Defaults.scala#L628
+ //val resolvedScalaVersion = ScalaVersion((scalaVersion in artifactName).value, (scalaBinaryVersion in artifactName).value)
+ //val resolvedArtifactName = artifactName.value(resolvedScalaVersion, projectID.value, artifact.value)
+    // if you would like to get a jar with the version number embedded in it (as sbt normally does),
+    // uncomment the other definition of `resolvedArtifactName`
+ val resolvedArtifact = artifact.value
+ val resolvedArtifactName = s"${resolvedArtifact.name}.${resolvedArtifact.extension}"
+ buildDirectory.value / "pack/lib" / resolvedArtifactName
+ }
+lazy val scalaSubprojectSettings: Seq[Setting[_]] = commonSettings :+ setJarLocation
+
+lazy val generatePropertiesFileSettings = Seq[Setting[_]](
+ copyrightString := "Copyright 2002-2013, LAMP/EPFL",
+ resourceGenerators in Compile += generateVersionPropertiesFile.map(file => Seq(file)).taskValue,
+ generateVersionPropertiesFile := generateVersionPropertiesFileImpl.value
+)
+
+val libIncludes: FileFilter = "*.tmpl" | "*.xml" | "*.js" | "*.css" | "rootdoc.txt"
+
+lazy val library = configureAsSubproject(project)
+ .settings(generatePropertiesFileSettings: _*)
+ .settings(
+ name := "scala-library",
+ scalacOptions in Compile ++= Seq[String]("-sourcepath", (scalaSource in Compile).value.toString),
+    // Workaround for a bug in `scaladoc`: it seems not to respect the `-sourcepath` option.
+    // As a result of this bug, the compiler cannot even initialize Definitions without
+    // binaries of the library on the classpath. Specifically, we get this error:
+    // (library/compile:doc) scala.reflect.internal.FatalError: package class scala does not have a member Int
+    // The Ant build always does the same thing: it puts binaries for documented classes on the classpath.
+    // sbt never does this by default (which seems like a good default).
+ dependencyClasspath in Compile in doc += (classDirectory in Compile).value,
+ scalacOptions in Compile in doc ++= {
+ val libraryAuxDir = (baseDirectory in ThisBuild).value / "src/library-aux"
+ Seq("-doc-no-compile", libraryAuxDir.toString)
+ },
+ includeFilter in unmanagedResources in Compile := libIncludes)
+ .dependsOn (forkjoin)
+
+lazy val reflect = configureAsSubproject(project)
+ .settings(generatePropertiesFileSettings: _*)
+ .settings(name := "scala-reflect")
+ .dependsOn(library)
+
+val compilerIncludes: FileFilter =
+ "*.tmpl" | "*.xml" | "*.js" | "*.css" | "*.html" | "*.properties" | "*.swf" |
+  "*.png" | "*.gif" | "*.txt"
+
+lazy val compiler = configureAsSubproject(project)
+ .settings(generatePropertiesFileSettings: _*)
+ .settings(
+ name := "scala-compiler",
+ libraryDependencies += antDep,
+    // this is a way to make sure that classes from the interactive and scaladoc projects
+    // end up in the compiler jar (that's what the Ant build does)
+ // we need to use LocalProject references (with strings) to deal with mutual recursion
+ mappings in Compile in packageBin :=
+ (mappings in Compile in packageBin).value ++
+ (mappings in Compile in packageBin in LocalProject("interactive")).value ++
+ (mappings in Compile in packageBin in LocalProject("scaladoc")).value ++
+ (mappings in Compile in packageBin in LocalProject("repl")).value,
+ includeFilter in unmanagedResources in Compile := compilerIncludes)
+ .dependsOn(library, reflect, asm)
+
+lazy val interactive = configureAsSubproject(project)
+ .settings(disableDocsAndPublishingTasks: _*)
+ .dependsOn(compiler)
+
+lazy val repl = configureAsSubproject(project)
+ .settings(libraryDependencies += jlineDep)
+ .settings(disableDocsAndPublishingTasks: _*)
+ .dependsOn(compiler)
+
+lazy val scaladoc = configureAsSubproject(project)
+ .settings(
+ libraryDependencies ++= Seq(scalaXmlDep, scalaParserCombinatorsDep, partestDep)
+ )
+ .settings(disableDocsAndPublishingTasks: _*)
+ .dependsOn(compiler)
+
+lazy val scalap = configureAsSubproject(project).
+ dependsOn(compiler)
+
+// deprecated Scala Actors project
+// TODO: it packages into actors.jar but it should be scala-actors.jar
+lazy val actors = configureAsSubproject(project)
+ .settings(generatePropertiesFileSettings: _*)
+ .settings(name := "scala-actors")
+ .dependsOn(library)
+
+lazy val forkjoin = configureAsForkOfJavaProject(project)
+
+lazy val asm = configureAsForkOfJavaProject(project)
+
+lazy val partestExtras = configureAsSubproject(Project("partest-extras", file(".") / "src" / "partest-extras"))
+ .dependsOn(repl)
+ .settings(clearSourceAndResourceDirectories: _*)
+ .settings(
+ libraryDependencies += partestDep,
+ unmanagedSourceDirectories in Compile := List(baseDirectory.value)
+ )
+
+lazy val junit = project.in(file("test") / "junit")
+ .dependsOn(library, reflect, compiler, partestExtras, scaladoc)
+ .settings(clearSourceAndResourceDirectories: _*)
+ .settings(commonSettings: _*)
+ .settings(
+ fork in Test := true,
+    libraryDependencies ++= Seq(junitDep, junitInterfaceDep),
+ testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"),
+ unmanagedSourceDirectories in Test := List(baseDirectory.value)
+ )
+
+lazy val partestJavaAgent = (project in file(".") / "src" / "partest-javaagent").
+ dependsOn(asm).
+ settings(commonSettings: _*).
+ settings(
+ doc := file("!!! NO DOCS !!!"),
+ publishLocal := {},
+ publish := {},
+ // Setting name to "scala-partest-javaagent" so that the jar file gets that name, which the Runner relies on
+ name := "scala-partest-javaagent",
+ // writing jar file to $buildDirectory/pack/lib because that's where it's expected to be found
+ setJarLocation,
+ // add required manifest entry - previously included from file
+ packageOptions in (Compile, packageBin) +=
+ Package.ManifestAttributes( "Premain-Class" -> "scala.tools.partest.javaagent.ProfilingAgent" ),
+ // we need to build this to a JAR
+ exportJars := true
+ )
+
+lazy val test = project.
+ dependsOn(compiler, interactive, actors, repl, scalap, partestExtras, partestJavaAgent, asm, scaladoc).
+ configs(IntegrationTest).
+ settings(disableDocsAndPublishingTasks: _*).
+ settings(commonSettings: _*).
+ settings(Defaults.itSettings: _*).
+ settings(
+ libraryDependencies ++= Seq(partestDep, scalaXmlDep, partestInterfaceDep, scalacheckDep),
+ unmanagedBase in Test := baseDirectory.value / "files" / "lib",
+ unmanagedJars in Test <+= (unmanagedBase) (j => Attributed.blank(j)) map(identity),
+ // no main sources
+ sources in Compile := Seq.empty,
+ // test sources are compiled in partest run, not here
+ sources in IntegrationTest := Seq.empty,
+ fork in IntegrationTest := true,
+ javaOptions in IntegrationTest += "-Xmx1G",
+ testFrameworks += new TestFramework("scala.tools.partest.Framework"),
+ testOptions in IntegrationTest += Tests.Setup( () => root.base.getAbsolutePath + "/pull-binary-libs.sh" ! ),
+ definedTests in IntegrationTest += (
+ new sbt.TestDefinition(
+ "partest",
+ // marker fingerprint since there are no test classes
+ // to be discovered by sbt:
+ new sbt.testing.AnnotatedFingerprint {
+ def isModule = true
+ def annotationName = "partest"
+ }, true, Array())
+ )
+ )
+
+lazy val root = (project in file(".")).
+ aggregate(library, forkjoin, reflect, compiler, asm, interactive, repl,
+ scaladoc, scalap, actors, partestExtras, junit).settings(
+ sources in Compile := Seq.empty,
+ onLoadMessage := """|*** Welcome to the sbt build definition for Scala! ***
+ |This build definition has an EXPERIMENTAL status. If you are not
+ |interested in testing or working on the build itself, please use
+ |the Ant build definition for now. Check README.md for more information.""".stripMargin
+ )
+
+lazy val dist = (project in file("dist")).settings(
+ mkBin := mkBinImpl.value
+)
+
+/**
+ * Configures the passed project as a subproject (e.g. compiler or repl)
+ * with common settings attached to it.
+ *
+ * Typical usage is:
+ *
+ * lazy val mySubproject = configureAsSubproject(project)
+ *
+ * We pass `project` as an argument which is in fact a macro call. This macro determines
+ * project.id based on the name of the lazy val on the left-hand side.
+ */
+def configureAsSubproject(project: Project): Project = {
+ val base = file(".") / "src" / project.id
+ (project in base).settings(scalaSubprojectSettings: _*)
+}
+
+/**
+ * Configuration for subprojects that are forks of some Java projects
+ * we depend on. At the moment there are just two: asm and forkjoin.
+ *
+ * We do not publish artifacts for those projects but we package their
+ * binaries in the jar of another project (compiler or library).
+ *
+ * For that reason we disable docs generation, packaging and publishing.
+ */
+def configureAsForkOfJavaProject(project: Project): Project = {
+ val base = file(".") / "src" / project.id
+ (project in base).
+ settings(commonSettings: _*).
+ settings(disableDocsAndPublishingTasks: _*).
+ settings(
+ sourceDirectory in Compile := baseDirectory.value,
+ javaSource in Compile := (sourceDirectory in Compile).value,
+ sources in Compile in doc := Seq.empty,
+ classDirectory in Compile := buildDirectory.value / "libs/classes" / thisProject.value.id
+ )
+}
+
+lazy val buildDirectory = settingKey[File]("The directory where all build products go. By default ./build-sbt")
+lazy val copyrightString = settingKey[String]("Copyright string.")
+lazy val generateVersionPropertiesFile = taskKey[File]("Generating version properties file.")
+lazy val mkBin = taskKey[Seq[File]]("Generate shell script (bash or Windows batch).")
+
+lazy val generateVersionPropertiesFileImpl: Def.Initialize[Task[File]] = Def.task {
+ val propFile = (resourceManaged in Compile).value / s"${thisProject.value.id}.properties"
+ val props = new java.util.Properties
+
+ /**
+   * Regexp that splits the version number into two parts: version and suffix.
+ * Examples of how the split is performed:
+ *
+ * "2.11.5": ("2.11.5", null)
+ * "2.11.5-acda7a": ("2.11.5", "-acda7a")
+ * "2.11.5-SNAPSHOT": ("2.11.5", "-SNAPSHOT")
+ *
+ */
+ val versionSplitted = """([\w+\.]+)(-[\w+\.]+)??""".r
+
+ val versionSplitted(ver, suffixOrNull) = version.value
+ val osgiSuffix = suffixOrNull match {
+ case null => "-VFINAL"
+ case "-SNAPSHOT" => ""
+ case suffixStr => suffixStr
+ }
+
+ def executeTool(tool: String) = {
+ val cmd =
+ if (System.getProperty("os.name").toLowerCase.contains("windows"))
+ s"cmd.exe /c tools\\$tool.bat -p"
+ else s"tools/$tool"
+ Process(cmd).lines.head
+ }
+
+ val commitDate = executeTool("get-scala-commit-date")
+ val commitSha = executeTool("get-scala-commit-sha")
+
+ props.put("version.number", s"${version.value}-$commitDate-$commitSha")
+ props.put("maven.version.number", s"${version.value}")
+ props.put("osgi.version.number", s"$ver.v$commitDate$osgiSuffix-$commitSha")
+ props.put("copyright.string", copyrightString.value)
+
+  // unfortunately, this will write properties in arbitrary order,
+  // which makes it harder to test for stability of generated artifacts;
+  // consider using https://github.com/etiennestuder/java-ordered-properties
+  // instead of java.util.Properties
+ IO.write(props, null, propFile)
+
+ propFile
+}
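+
+// Illustrative (hypothetical) values, to make the suffix handling above concrete: assuming
+// version.value is "2.11.6-SNAPSHOT", commitDate is "20150225" and commitSha is "abc1234"
+// (both made up), the generated file would contain
+//   version.number       = 2.11.6-SNAPSHOT-20150225-abc1234
+//   maven.version.number = 2.11.6-SNAPSHOT
+//   osgi.version.number  = 2.11.6.v20150225-abc1234   (the "-SNAPSHOT" suffix maps to an empty OSGi suffix)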
+
+// Defining these settings is somewhat redundant as we also redefine settings that depend on them.
+// However, IntelliJ's project import works better when these are set correctly.
+def clearSourceAndResourceDirectories = Seq(Compile, Test).flatMap(config => inConfig(config)(Seq(
+ unmanagedSourceDirectories := Nil,
+ managedSourceDirectories := Nil,
+ unmanagedResourceDirectories := Nil,
+ managedResourceDirectories := Nil
+)))
+
+lazy val mkBinImpl: Def.Initialize[Task[Seq[File]]] = Def.task {
+ def mkScalaTool(mainCls: String, classpath: Seq[Attributed[File]]): ScalaTool =
+ ScalaTool(mainClass = mainCls,
+ classpath = classpath.toList.map(_.data.getAbsolutePath),
+ properties = Map.empty,
+ javaOpts = "-Xmx256M -Xms32M",
+ toolFlags = "")
+ val rootDir = (classDirectory in Compile in compiler).value
+ def writeScripts(scalaTool: ScalaTool, file: String, outDir: File): Seq[File] =
+ Seq(
+ scalaTool.writeScript(file, "unix", rootDir, outDir),
+ scalaTool.writeScript(file, "windows", rootDir, outDir)
+ )
+ def mkQuickBin(file: String, mainCls: String, classpath: Seq[Attributed[File]]): Seq[File] = {
+ val scalaTool = mkScalaTool(mainCls, classpath)
+ val outDir = buildDirectory.value / "quick/bin"
+ writeScripts(scalaTool, file, outDir)
+ }
+
+ def mkPackBin(file: String, mainCls: String): Seq[File] = {
+ val scalaTool = mkScalaTool(mainCls, classpath = Nil)
+ val outDir = buildDirectory.value / "pack/bin"
+ writeScripts(scalaTool, file, outDir)
+ }
+
+ def mkBin(file: String, mainCls: String, classpath: Seq[Attributed[File]]): Seq[File] =
+ mkQuickBin(file, mainCls, classpath) ++ mkPackBin(file, mainCls)
+
+ mkBin("scala" , "scala.tools.nsc.MainGenericRunner", (fullClasspath in Compile in repl).value) ++
+ mkBin("scalac" , "scala.tools.nsc.Main", (fullClasspath in Compile in compiler).value) ++
+ mkBin("fsc" , "scala.tools.nsc.CompileClient", (fullClasspath in Compile in compiler).value) ++
+ mkBin("scaladoc" , "scala.tools.nsc.ScalaDoc", (fullClasspath in Compile in scaladoc).value) ++
+ mkBin("scalap" , "scala.tools.scalap.Main", (fullClasspath in Compile in scalap).value)
+}
+
+buildDirectory in ThisBuild := (baseDirectory in ThisBuild).value / "build-sbt"
+
+lazy val versionProps: Map[String, String] = {
+ import java.io.FileInputStream
+ import java.util.Properties
+ val props = new Properties()
+ val in = new FileInputStream(file("versions.properties"))
+ try props.load(in)
+ finally in.close()
+ import scala.collection.JavaConverters._
+ props.asScala.toMap
+}
+
+def versionNumber(name: String): String =
+ versionProps(s"$name.version.number")
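For reference, the `versionNumber` helper above expects keys of the form `<module>.version.number` in
versions.properties, while jline is read directly via the `jline.version` key. Below is a minimal,
self-contained Scala sketch of that lookup convention; the property values are made up, the real ones
live in versions.properties at the repository root:

import java.io.StringReader
import java.util.Properties

object VersionLookupSketch {
  // Hypothetical entries; the real values live in versions.properties at the repository root.
  val sample: String =
    """scala-xml.version.number=1.0.3
      |scala-parser-combinators.version.number=1.0.3
      |partest.version.number=1.0.5
      |jline.version=2.12.1
      |""".stripMargin

  def main(args: Array[String]): Unit = {
    val props = new Properties()
    props.load(new StringReader(sample))
    // Mirrors versionNumber(name) in build.sbt: module versions use the "<name>.version.number" key.
    def versionNumber(name: String): String = props.getProperty(s"$name.version.number")
    println(versionNumber("scala-xml"))         // 1.0.3 (made-up value)
    println(props.getProperty("jline.version")) // 2.12.1 (made-up value)
  }
}
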
diff --git a/build.xml b/build.xml
index 4b79b68a02..0a67f8a563 100755
--- a/build.xml
+++ b/build.xml
@@ -165,7 +165,7 @@ TODO:
<property name="build.dir" value="${basedir}/build"/>
<property name="build-deps.dir" value="${build.dir}/deps"/>
<property name="build-libs.dir" value="${build.dir}/libs"/>
- <property name="build-asm.dir" value="${build.dir}/asm"/>
+ <property name="build-asm.dir" value="${build-libs.dir}"/>
<property name="build-forkjoin.dir" value="${build-libs.dir}"/>
<property name="build-locker.dir" value="${build.dir}/locker"/>
<property name="build-quick.dir" value="${build.dir}/quick"/>
@@ -588,8 +588,8 @@ TODO:
</propertyfile>
</then></if>
- <path id="forkjoin.classpath" path="${build-libs.dir}/classes/forkjoin"/>
- <path id="asm.classpath" path="${build-asm.dir}/classes"/>
+ <path id="forkjoin.classpath" path="${build-forkjoin.dir}/classes/forkjoin"/>
+ <path id="asm.classpath" path="${build-asm.dir}/classes/asm"/>
<property name="forkjoin-classes" refid="forkjoin.classpath"/>
<property name="asm-classes" refid="asm.classpath"/>
@@ -1061,7 +1061,7 @@ TODO:
============================================================================ -->
<target name="asm.done" depends="init"> <simple-javac project="asm" jar="no"/> </target>
- <target name="forkjoin.done" depends="init"> <simple-javac project="forkjoin" args="-XDignore.symbol.file"/></target>
+ <target name="forkjoin.done" depends="init"> <simple-javac project="forkjoin" args="-XDignore.symbol.file" jar="no"/></target>
<!-- For local development only. We only allow released versions of Scala for STARR.
This builds quick (core only) and publishes it with a generated version number,
diff --git a/compare-build-dirs-ignore-patterns b/compare-build-dirs-ignore-patterns
new file mode 100644
index 0000000000..8c8160ba15
--- /dev/null
+++ b/compare-build-dirs-ignore-patterns
@@ -0,0 +1,8 @@
+.DS_Store
+*.complete
+locker
+deps
+scala-continuations-*.jar
+scala-parser-combinators*.jar
+scala-swing*.jar
+scala-xml*.jar
diff --git a/compare-build-dirs.sh b/compare-build-dirs.sh
new file mode 100755
index 0000000000..f6806dd422
--- /dev/null
+++ b/compare-build-dirs.sh
@@ -0,0 +1,5 @@
+# Compares build directories generated by the Ant and sbt build definitions.
+# This lets us see how far we are from achieving perfect parity
+# between the builds.
+
+diff -X compare-build-dirs-ignore-patterns -qr build/ build-sbt/
diff --git a/project/ScalaTool.scala b/project/ScalaTool.scala
new file mode 100644
index 0000000000..559b215c18
--- /dev/null
+++ b/project/ScalaTool.scala
@@ -0,0 +1,44 @@
+import sbt._
+import org.apache.commons.lang3.StringUtils.replaceEach
+
+/**
+ * A class that generates a shell or batch script to execute a Scala program.
+ *
+ * This is a simplified copy of the Ant task (see scala.tools.ant.ScalaTool).
+ */
+case class ScalaTool(mainClass: String,
+ classpath: List[String],
+ properties: Map[String, String],
+ javaOpts: String,
+ toolFlags: String) {
+  // For the classpath, the platform-specific
+  // demarcation of any script variables (e.g. `${SCALA_HOME}` or
+  // `%SCALA_HOME%`) can be specified in a platform-independent way (e.g.
+  // `@SCALA_HOME@`) and is automatically translated for you.
+ def patchedToolScript(template: String, platform: String) = {
+ val varRegex = """@(\w+)@""" // the group should be able to capture each of the keys of the map below
+
+ val variables = Map(
+ ("@@" -> "@"), // for backwards compatibility
+ ("@class@" -> mainClass),
+ ("@properties@" -> (properties map { case (k, v) => s"""-D$k="$v""""} mkString " ")),
+ ("@javaflags@" -> javaOpts),
+ ("@toolflags@" -> toolFlags),
+ ("@classpath@" -> (platform match {
+ case "unix" => classpath.mkString(":").replace('\\', '/').replaceAll(varRegex, """\${$1}""")
+ case "windows" => classpath.mkString(";").replace('/', '\\').replaceAll(varRegex, "%$1%")
+ }))
+ )
+
+ val (from, to) = variables.unzip
+ replaceEach(template, from.toArray, to.toArray)
+ }
+
+ def writeScript(file: String, platform: String, rootDir: File, outDir: File): File = {
+ val templatePath = s"scala/tools/ant/templates/tool-$platform.tmpl"
+ val suffix = platform match { case "windows" => ".bat" case _ => "" }
+ val scriptFile = outDir / s"$file$suffix"
+ IO.write(scriptFile, patchedToolScript(IO.read(rootDir / templatePath), platform))
+ scriptFile
+ }
+}
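
To make the template substitution above concrete, here is a hypothetical usage sketch (not part of the
commit). It assumes the ScalaTool class above and commons-lang3 (added by project/plugins.sbt) are on
the classpath, and it uses illustrative classpath entries and a toy template rather than the real
tool-unix.tmpl:

object ScalaToolSketch {
  def main(args: Array[String]): Unit = {
    val tool = ScalaTool(
      mainClass  = "scala.tools.nsc.Main",
      classpath  = List("@SCALA_HOME@/lib/scala-library.jar", "@SCALA_HOME@/lib/scala-compiler.jar"),
      properties = Map.empty,
      javaOpts   = "-Xmx256M -Xms32M",
      toolFlags  = "")
    // A toy template; the real ones live under scala/tools/ant/templates/.
    val template = """exec java @javaflags@ -cp "@classpath@" @class@ @toolflags@"""
    // On unix, classpath entries are joined with ':' and @SCALA_HOME@ becomes ${SCALA_HOME};
    // on windows, they are joined with ';' and it becomes %SCALA_HOME%.
    println(tool.patchedToolScript(template, "unix"))
  }
}
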
diff --git a/project/build.properties b/project/build.properties
new file mode 100644
index 0000000000..748703f770
--- /dev/null
+++ b/project/build.properties
@@ -0,0 +1 @@
+sbt.version=0.13.7
diff --git a/project/plugins.sbt b/project/plugins.sbt
new file mode 100644
index 0000000000..dc266a8db1
--- /dev/null
+++ b/project/plugins.sbt
@@ -0,0 +1 @@
+libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.3.2" \ No newline at end of file
diff --git a/src/compiler/scala/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala
index 7c0e7dfbb8..e753c9787a 100644
--- a/src/compiler/scala/reflect/quasiquotes/Reifiers.scala
+++ b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala
@@ -317,7 +317,7 @@ trait Reifiers { self: Quasiquotes =>
* Reification of non-trivial list is done in two steps:
*
* 1. split the list into groups where every placeholder is always
- * put in a group of it's own and all subsquent non-holeMap are
+   *    put in a group of its own and all subsequent non-holeMap are
* grouped together; element is considered to be a placeholder if it's
* in the domain of the fill function;
*
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index e1cfa63960..4b32aab5ee 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -127,7 +127,7 @@ trait PhaseAssembly {
}
/* Find all edges in the given graph that are hard links. For each hard link we
- * need to check that its the only dependency. If not, then we will promote the
+ * need to check that it's the only dependency. If not, then we will promote the
* other dependencies down
*/
def validateAndEnforceHardlinks() {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 67e91ae857..4f195c2985 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1259,7 +1259,7 @@ self =>
atPos(start) { Apply(t3, exprsBuf.toList) }
}
if (inPattern) stringCheese
- else withPlaceholders(stringCheese, isAny = true) // strinterpolator params are Any* by definition
+ else withPlaceholders(stringCheese, isAny = true) // string interpolator params are Any* by definition
}
/* ------------- NEW LINES ------------------------------------------------- */
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
index 0cdf629ce1..843648282b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
@@ -109,7 +109,7 @@ abstract class ICodeCheckers {
/** Only called when m1 < m2, so already known that (m1 ne m2).
*/
- private def isConfict(m1: IMember, m2: IMember, canOverload: Boolean) = (
+ private def isConflict(m1: IMember, m2: IMember, canOverload: Boolean) = (
(m1.symbol.name == m2.symbol.name) &&
(!canOverload || (m1.symbol.tpe =:= m2.symbol.tpe))
)
@@ -119,11 +119,11 @@ abstract class ICodeCheckers {
clasz = cls
for (f1 <- cls.fields ; f2 <- cls.fields ; if f1 < f2)
- if (isConfict(f1, f2, canOverload = false))
+ if (isConflict(f1, f2, canOverload = false))
icodeError("Repetitive field name: " + f1.symbol.fullName)
for (m1 <- cls.methods ; m2 <- cls.methods ; if m1 < m2)
- if (isConfict(m1, m2, canOverload = true))
+ if (isConflict(m1, m2, canOverload = true))
icodeError("Repetitive method: " + m1.symbol.fullName)
clasz.methods foreach check
@@ -471,7 +471,7 @@ abstract class ICodeCheckers {
pushStack(local.kind)
case LOAD_FIELD(field, isStatic) =>
- // the symbol's owner should contain it's field, but
+ // the symbol's owner should contain its field, but
// this is already checked by the type checker, no need
// to redo that here
if (isStatic) ()
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index b0ad5bdaf9..058b6a161d 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -572,7 +572,7 @@ abstract class TypeFlowAnalysis {
- `inlined` : These blocks were spliced into the method's CFG as part of inlining. Being new blocks, they haven't been visited yet by the typeflow analysis.
- - `staleIn` : These blocks are what `doInline()` calls `afterBlock`s, ie the new home for instructions that previously appearead
+ - `staleIn` : These blocks are what `doInline()` calls `afterBlock`s, ie the new home for instructions that previously appeared
after a callsite in a `staleOut` block.
Based on the above information, we have to bring up-to-date the caches that `forwardAnalysis` and `blockTransfer` use to skip blocks and instructions.
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala
index 162da4236a..eadc404bee 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala
@@ -329,10 +329,12 @@ final class BCodeAsmCommon[G <: Global](val global: G) {
* Build the [[InlineInfo]] for a class symbol.
*/
def buildInlineInfoFromClassSymbol(classSym: Symbol, classSymToInternalName: Symbol => InternalName, methodSymToDescriptor: Symbol => String): InlineInfo = {
- val selfType = {
+ val traitSelfType = if (classSym.isTrait && !classSym.isImplClass) {
// The mixin phase uses typeOfThis for the self parameter in implementation class methods.
val selfSym = classSym.typeOfThis.typeSymbol
if (selfSym != classSym) Some(classSymToInternalName(selfSym)) else None
+ } else {
+ None
}
val isEffectivelyFinal = classSym.isEffectivelyFinal
@@ -394,6 +396,6 @@ final class BCodeAsmCommon[G <: Global](val global: G) {
}
}).toMap
- InlineInfo(selfType, isEffectivelyFinal, methodInlineInfos, warning)
+ InlineInfo(traitSelfType, isEffectivelyFinal, methodInlineInfos, warning)
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
index d690542f0e..e61190bf3a 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
@@ -315,7 +315,7 @@ abstract class BTypes {
case ArrayBType(component) =>
if (other == ObjectReference || other == jlCloneableReference || other == jioSerializableReference) true
else other match {
- case ArrayBType(otherComponoent) => component.conformsTo(otherComponoent).orThrow
+ case ArrayBType(otherComponent) => component.conformsTo(otherComponent).orThrow
case _ => false
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index 3704acb055..1b6631e7a4 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -173,7 +173,7 @@ abstract class DeadCodeElimination extends SubComponent {
moveToWorkList()
case LOAD_FIELD(sym, isStatic) if isStatic || !inliner.isClosureClass(sym.owner) =>
- // static load may trigger static initization.
+ // static load may trigger static initialization.
// non-static load can throw NPE (but we know closure fields can't be accessed via a
// null reference.
moveToWorkList()
@@ -191,7 +191,7 @@ abstract class DeadCodeElimination extends SubComponent {
case LOAD_EXCEPTION(_) | DUP(_) | LOAD_MODULE(_) => true
case _ =>
dropOf((bb1, idx1)) = (bb,idx) :: dropOf.getOrElse((bb1, idx1), Nil)
- debuglog("DROP is innessential: " + i + " because of: " + bb1(idx1) + " at " + bb1 + ":" + idx1)
+ debuglog("DROP is inessential: " + i + " because of: " + bb1(idx1) + " at " + bb1 + ":" + idx1)
false
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index d0fca12e6a..86685d46de 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -315,7 +315,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
}
def rewriteDelayedInit() {
- /* XXX This is not corect: remainingConstrStats.nonEmpty excludes too much,
+ /* XXX This is not correct: remainingConstrStats.nonEmpty excludes too much,
* but excluding it includes too much. The constructor sequence being mimicked
* needs to be reproduced with total fidelity.
*
diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
index 2d33b35241..45a89ac594 100644
--- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
+++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
@@ -77,7 +77,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
// the result of the transformFunction method.
sealed abstract class TransformedFunction
- // A class definition for the lambda, an expression insantiating the lambda class
+ // A class definition for the lambda, an expression instantiating the lambda class
case class DelambdafyAnonClass(lambdaClassDef: ClassDef, newExpr: Tree) extends TransformedFunction
// here's the main entry point of the transform
@@ -86,7 +86,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
case fun @ Function(_, _) =>
transformFunction(fun) match {
case DelambdafyAnonClass(lambdaClassDef, newExpr) =>
- // a lambda beccomes a new class, an instantiation expression
+ // a lambda becomes a new class, an instantiation expression
val pkg = lambdaClassDef.symbol.owner
// we'll add the lambda class to the package later
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index 38671ebaae..df622d4d1d 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -219,7 +219,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
* l$
* } or
* <rhs> when the lazy value has type Unit (for which there is no field
- * to cache it's value.
+ * to cache its value.
*
* Similarly as for normal lazy val members (see Mixin), the result will be a tree of the form
* { if ((bitmap&n & MASK) == 0) this.l$compute()
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 408f4466e1..11f9483f77 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -26,7 +26,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
*/
private val treatedClassInfos = perRunCaches.newMap[Symbol, Type]() withDefaultValue NoType
- /** Map a lazy, mixedin field accessor to it's trait member accessor */
+ /** Map a lazy, mixedin field accessor to its trait member accessor */
private val initializer = perRunCaches.newMap[Symbol, Symbol]()
// --------- helper functions -----------------------------------------------
@@ -886,7 +886,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
/* Complete lazy field accessors. Applies only to classes,
- * for it's own (non inherited) lazy fields. If 'checkinit'
+ * for its own (non inherited) lazy fields. If 'checkinit'
* is enabled, getters that check for the initialized bit are
* generated, and the class constructor is changed to set the
* initialized bits.
diff --git a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
index cffb483072..ba303f7c2b 100644
--- a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
@@ -27,7 +27,7 @@ abstract class SampleTransform extends Transform {
tree1 match {
case Block(List(), expr) => // a simple optimization
expr
- case Block(defs, sup @ Super(qual, mix)) => // A hypthothetic transformation, which replaces
+ case Block(defs, sup @ Super(qual, mix)) => // A hypothetical transformation, which replaces
// {super} by {super.sample}
treeCopy.Block( // `copy` is the usual lazy tree copier
tree1, defs,
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 086512677e..1a24c668ba 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -610,7 +610,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
exitingSpecialize(sClass setInfo specializedInfoType)
val fullEnv = outerEnv ++ env
- /* Enter 'sym' in the scope of the current specialized class. It's type is
+ /* Enter 'sym' in the scope of the current specialized class. Its type is
* mapped through the active environment, binding type variables to concrete
* types. The existing typeEnv for `sym` is composed with the current active
* environment
@@ -1368,7 +1368,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
)
def specializeCalls(unit: CompilationUnit) = new TypingTransformer(unit) {
- /** Map a specializable method to it's rhs, when not deferred. */
+ /** Map a specializable method to its rhs, when not deferred. */
val body = perRunCaches.newMap[Symbol, Tree]()
/** Map a specializable method to its value parameter symbols. */
diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
index 2c27bdb03a..0574869714 100644
--- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
@@ -57,7 +57,7 @@ trait AnalyzerPlugins { self: Analyzer =>
* `analyzer.transformed` hash map, indexed by the definition's rhs tree.
*
* NOTE: Invoking the type checker can lead to cyclic reference errors. For instance, if this
- * method is called from the type completer of a recursive method, type checking the mehtod
+ * method is called from the type completer of a recursive method, type checking the method
* rhs will invoke the same completer again. It might be possible to avoid this situation by
* assigning `tpe` to `defTree.symbol` (untested) - the final type computed by this method
* will then be assigned to the definition's symbol by monoTypeCompleter (in Namers).
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index ca25e59c4b..542f58795a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -167,7 +167,7 @@ trait Contexts { self: Analyzer =>
* afterwards errors are thrown. This is configured in `rootContext`. Additionally, more
* fine grained control is needed based on the kind of error; ambiguity errors are often
* suppressed during exploratory typing, such as determining whether `a == b` in an argument
- * position is an assignment or a named argument, when `Infererencer#isApplicableSafe` type checks
+ * position is an assignment or a named argument, when `Inferencer#isApplicableSafe` type checks
* applications with and without an expected type, or whtn `Typer#tryTypedApply` tries to fit arguments to
* a function type with/without implicit views.
*
@@ -1108,10 +1108,10 @@ trait Contexts { self: Analyzer =>
//
// A type-import-on-demand declaration never causes any other declaration to be shadowed.
//
- // Scala: Bindings of different kinds have a precedence defined on them:
+ // Scala: Bindings of different kinds have a precedence defined on them:
//
- // 1) Definitions and declarations that are local, inherited, or made available by a
- // package clause in the same compilation unit where the definition occurs have
+ // 1) Definitions and declarations that are local, inherited, or made available by a
+ // package clause in the same compilation unit where the definition occurs have
// highest precedence.
// 2) Explicit imports have next highest precedence.
def depthOk(imp: ImportInfo) = (
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 27e17fc65f..f9582a54ff 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -376,7 +376,7 @@ trait Infer extends Checkable {
}
/** Overload which allocates fresh type vars.
* The other one exists because apparently inferExprInstance needs access to the typevars
- * after the call, and its wasteful to return a tuple and throw it away almost every time.
+ * after the call, and it's wasteful to return a tuple and throw it away almost every time.
*/
private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean): List[Type] =
exprTypeArgs(tparams map freshVar, tparams, restpe, pt, useWeaklyCompatible)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 24238b8e41..77c49a862a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -1125,7 +1125,7 @@ trait Namers extends MethodSynthesis {
for (vparam <- vparams) {
if (vparam.tpt.isEmpty) {
val overriddenParamTp = overriddenParams.head.tpe
- // references to type parameteres in overriddenParamTp link to the type skolems, so the
+ // references to type parameters in overriddenParamTp link to the type skolems, so the
// assigned type is consistent with the other / existing parameter types in vparamSymss.
vparam.symbol setInfo overriddenParamTp
vparam.tpt defineType overriddenParamTp setPos vparam.pos.focus
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 5abfbe850f..a3a4c70d1e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -584,7 +584,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
!other.isDeferred && other.isJavaDefined && !sym.enclClass.isSubClass(other.enclClass) && {
// #3622: erasure operates on uncurried types --
// note on passing sym in both cases: only sym.isType is relevant for uncurry.transformInfo
- // !!! erasure.erasure(sym, uncurry.transformInfo(sym, tp)) gives erreneous of inaccessible type - check whether that's still the case!
+ // !!! erasure.erasure(sym, uncurry.transformInfo(sym, tp)) gives erroneous or inaccessible type - check whether that's still the case!
def uncurryAndErase(tp: Type) = erasure.erasure(sym)(uncurry.transformInfo(sym, tp))
val tp1 = uncurryAndErase(clazz.thisType.memberType(sym))
val tp2 = uncurryAndErase(clazz.thisType.memberType(other))
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 7417c5364e..27a574a449 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -4301,7 +4301,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val selector1 = atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) }
// SI-8120 If we don't duplicate the cases, the original Match node will share trees with ones that
// receive symbols owned by this function. However if, after a silent mode session, we discard
- // this Function and try a different approach (e.g. applying a view to the reciever) we end up
+ // this Function and try a different approach (e.g. applying a view to the receiver) we end up
// with orphaned symbols which blows up far down the pipeline (or can be detected with -Ycheck:typer).
val body = treeCopy.Match(tree, selector1, (cases map duplicateAndKeepPositions).asInstanceOf[List[CaseDef]])
typed1(atPos(tree.pos) { Function(params, body) }, mode, pt)
diff --git a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
index 2e4f6b08e9..9caebb711d 100644
--- a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
@@ -302,7 +302,7 @@ trait CompilerControl { self: Global =>
abstract class WorkItem extends (() => Unit) {
val onCompilerThread = self.onCompilerThread
- /** Raise a MissingReponse, if the work item carries a response. */
+ /** Raise a MissingResponse, if the work item carries a response. */
def raiseMissing(): Unit
}
diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
index 2d09435f60..727bfdd510 100644
--- a/src/interactive/scala/tools/nsc/interactive/Global.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -315,7 +315,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
private val NoResponse: Response[_] = new Response[Any]
/** The response that is currently pending, i.e. the compiler
- * is working on providing an asnwer for it.
+ * is working on providing an answer for it.
*/
private var pendingResponse: Response[_] = NoResponse
diff --git a/src/interactive/scala/tools/nsc/interactive/Lexer.scala b/src/interactive/scala/tools/nsc/interactive/Lexer.scala
index 82e8de3f3d..7daf24c204 100644
--- a/src/interactive/scala/tools/nsc/interactive/Lexer.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Lexer.scala
@@ -195,7 +195,7 @@ class Lexer(rd: Reader) {
case 'n' => putAcceptString("null"); token = NullLit
case '"' => getString()
case '-' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => getNumber()
- case _ => error("unrecoginezed start of token: '"+ch+"'")
+ case _ => error("unrecognized start of token: '"+ch+"'")
}
//println("["+token+"]")
}
diff --git a/src/library/scala/Mutable.scala b/src/library/scala/Mutable.scala
index 8ef0424db6..43f98ee4df 100644
--- a/src/library/scala/Mutable.scala
+++ b/src/library/scala/Mutable.scala
@@ -11,7 +11,7 @@
package scala
/**
- * A marker trait for mutable datatructures such as mutable collections
+ * A marker trait for mutable data structures such as mutable collections
*
* @since 2.8
*/
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index bf7739345e..42448b38f2 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -58,7 +58,7 @@ import scala.io.StdIn
* condition fails, then the caller of the function is to blame rather than a
* logical error having been made within `addNaturals` itself. `ensures` is a
* form of `assert` that declares the guarantee the function is providing with
- * regards to it's return value.
+ * regards to its return value.
*
* === Implicit Conversions ===
* A number of commonly applied implicit conversions are also defined here, and
@@ -85,7 +85,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
type String = java.lang.String
type Class[T] = java.lang.Class[T]
- // miscelleaneous -----------------------------------------------------
+ // miscellaneous -----------------------------------------------------
scala.`package` // to force scala package object to be seen.
scala.collection.immutable.List // to force Nil, :: to be seen.
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index 66fce0f902..b775480532 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -413,7 +413,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
* @inheritdoc
*
* Another way to express this
- * is that `xs union ys` computes the order-presevring multi-set union of `xs` and `ys`.
+ * is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`.
* `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets.
*
* $willNotTerminateInf
diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala
index 2eea15b8dc..cfb567abe9 100644
--- a/src/library/scala/collection/concurrent/Map.scala
+++ b/src/library/scala/collection/concurrent/Map.scala
@@ -49,7 +49,7 @@ trait Map[A, B] extends scala.collection.mutable.Map[A, B] {
def putIfAbsent(k: A, v: B): Option[B]
/**
- * Removes the entry for the specified key if its currently mapped to the
+ * Removes the entry for the specified key if it's currently mapped to the
* specified value.
*
* $atomicop
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index f303e79bb3..cf7b7e272a 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -360,7 +360,7 @@ self =>
* `List(BigInt(12)) ++ fibs`.
*
* @tparam B The element type of the returned collection.'''That'''
- * @param that The [[scala.collection.GenTraversableOnce]] the be contatenated
+   *  @param that   The [[scala.collection.GenTraversableOnce]] to be concatenated
* to this `Stream`.
* @return A new collection containing the result of concatenating `this` with
* `that`.
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
index cc2acb74d4..b63d0aae33 100644
--- a/src/library/scala/collection/mutable/AVLTree.scala
+++ b/src/library/scala/collection/mutable/AVLTree.scala
@@ -27,7 +27,7 @@ private[mutable] sealed trait AVLTree[+A] extends Serializable {
/**
* Returns a new tree containing the given element.
- * Thows an IllegalArgumentException if element is already present.
+ * Throws an IllegalArgumentException if element is already present.
*
*/
def insert[B >: A](value: B, ordering: Ordering[B]): AVLTree[B] = Node(value, Leaf, Leaf)
@@ -95,7 +95,7 @@ private case class Node[A](data: A, left: AVLTree[A], right: AVLTree[A]) extends
/**
* Returns a new tree containing the given element.
- * Thows an IllegalArgumentException if element is already present.
+ * Throws an IllegalArgumentException if element is already present.
*
*/
override def insert[B >: A](value: B, ordering: Ordering[B]) = {
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 65d9c35052..b48a32fa37 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -449,7 +449,7 @@ private[collection] object HashTable {
// h
/* OLD VERSION
- * quick, but bad for sequence 0-10000 - little enthropy in higher bits
+ * quick, but bad for sequence 0-10000 - little entropy in higher bits
* since 2003 */
// var h: Int = hcode + ~(hcode << 9)
// h = h ^ (h >>> 14)
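To see why "little entropy in higher bits" matters for small consecutive keys: `Int.hashCode` is the identity, so for 0 to 10000 the top bits are all zero and any table that derives its bucket from them collapses into one bucket. A hedged demonstration using the standard `byteswap32` scrambler (this is not the `improve` function `HashTable` itself uses):

  import scala.util.hashing.byteswap32

  def topBits(h: Int, n: Int): Int = h >>> (32 - n)

  (0 to 10000).map(i => topBits(i.hashCode, 8)).distinct.size    // 1: every key shares the same top bits
  (0 to 10000).map(i => topBits(byteswap32(i), 8)).distinct.size // many distinct values after scrambling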
diff --git a/src/library/scala/collection/mutable/UnrolledBuffer.scala b/src/library/scala/collection/mutable/UnrolledBuffer.scala
index 693c47d86e..0a84a2b2a8 100644
--- a/src/library/scala/collection/mutable/UnrolledBuffer.scala
+++ b/src/library/scala/collection/mutable/UnrolledBuffer.scala
@@ -85,7 +85,7 @@ extends scala.collection.mutable.AbstractBuffer[T]
def classTagCompanion = UnrolledBuffer
- /** Concatenates the targer unrolled buffer to this unrolled buffer.
+ /** Concatenates the target unrolled buffer to this unrolled buffer.
*
* The specified buffer `that` is cleared after this operation. This is
* an O(1) operation.
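For context on the corrected sentence: `concat` can be O(1) because it links the argument's node chain onto the receiver instead of copying elements, which is also why the argument ends up empty. A small sketch (values are illustrative):

  import scala.collection.mutable.UnrolledBuffer

  val a = UnrolledBuffer(1, 2, 3)
  val b = UnrolledBuffer(4, 5)
  a concat b    // a now contains 1, 2, 3, 4, 5
  b.isEmpty     // true: the buffer passed in is cleared by the operation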
diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala
index 53fca9f779..8740bda835 100644
--- a/src/library/scala/collection/mutable/WrappedArray.scala
+++ b/src/library/scala/collection/mutable/WrappedArray.scala
@@ -93,7 +93,7 @@ object WrappedArray {
def empty[T <: AnyRef]: WrappedArray[T] = EmptyWrappedArray.asInstanceOf[WrappedArray[T]]
// If make is called explicitly we use whatever we're given, even if it's
- // empty. This may be unnecesssary (if WrappedArray is to honor the collections
+ // empty. This may be unnecessary (if WrappedArray is to honor the collections
// contract all empty ones must be equal, so discriminating based on the reference
// equality of an empty array should not come up) but we may as well be
// conservative since wrapRefArray contributes most of the unnecessary allocations.
diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala
index 3d0597ca22..91e55d30cb 100644
--- a/src/library/scala/concurrent/JavaConversions.scala
+++ b/src/library/scala/concurrent/JavaConversions.scala
@@ -11,7 +11,7 @@ package scala.concurrent
import java.util.concurrent.{ExecutorService, Executor}
import scala.language.implicitConversions
-/** The `JavaConversions` object provides implicit converstions supporting
+/** The `JavaConversions` object provides implicit conversions supporting
* interoperability between Scala and Java concurrency classes.
*
* @author Philipp Haller
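A minimal sketch of the interoperability the corrected sentence describes, assuming the 2.11-era `asExecutionContext` conversion for `java.util.concurrent.ExecutorService`:

  import java.util.concurrent.Executors
  import scala.concurrent.Future
  import scala.concurrent.JavaConversions.asExecutionContext

  val pool = Executors.newFixedThreadPool(2)
  implicit val ec = asExecutionContext(pool)   // wraps the Java pool as an ExecutionContext

  Future { 21 * 2 }                            // scheduled on the Java thread pool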
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index cf95f945ba..d6e2963ad8 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -431,7 +431,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
* with unequal `hashCode`s. These hash codes agree with `BigInt`
* for whole numbers up ~4934 digits (the range of IEEE 128 bit floating
* point). Beyond this, hash codes will disagree; this prevents the
- * explicit represention of the `BigInt` form for `BigDecimal` values
+ * explicit representation of the `BigInt` form for `BigDecimal` values
* with large exponents.
*/
override def hashCode(): Int = {
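A quick REPL check of the agreement the comment describes, for small whole numbers (expected results noted in comments):

  BigDecimal(12).hashCode == BigInt(12).hashCode                      // expected: true
  BigDecimal("1234567890").hashCode == BigInt("1234567890").hashCode  // expected: true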
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index b31bbf0540..066b2f5373 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -203,7 +203,7 @@ object BasicIO {
/** Returns a `ProcessIO` connected to stdout and stderr, and, optionally, stdin. */
def standard(connectInput: Boolean): ProcessIO = standard(input(connectInput))
- /** Retruns a `ProcessIO` connected to stdout, stderr and the provided `in` */
+ /** Returns a `ProcessIO` connected to stdout, stderr and the provided `in` */
def standard(in: OutputStream => Unit): ProcessIO = new ProcessIO(in, toStdOut, toStdErr)
/** Send all the input from the stream to stderr, and closes the input stream
diff --git a/src/library/scala/sys/process/ProcessLogger.scala b/src/library/scala/sys/process/ProcessLogger.scala
index ae347221ef..6072894007 100644
--- a/src/library/scala/sys/process/ProcessLogger.scala
+++ b/src/library/scala/sys/process/ProcessLogger.scala
@@ -88,7 +88,7 @@ object ProcessLogger {
/** Creates a [[scala.sys.process.ProcessLogger]] that sends all output to the corresponding
* function.
*
- * @param fout This function will receive standard outpout.
+ * @param fout This function will receive standard output.
*
* @param ferr This function will receive standard error.
*/
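Usage sketch for the two-argument factory whose parameter doc is fixed above; the command is an arbitrary example:

  import scala.sys.process._

  val logger = ProcessLogger(
    out => Console.out.println(s"stdout: $out"),   // fout: receives standard output
    err => Console.err.println(s"stderr: $err")    // ferr: receives standard error
  )
  val exitCode = Process("ls -l") ! logger         // run a command, routing its streams to the functions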
diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala
index be6d03a145..aa30887ba0 100644
--- a/src/library/scala/util/control/Exception.scala
+++ b/src/library/scala/util/control/Exception.scala
@@ -155,7 +155,7 @@ object Exception {
/** A `Catch` object which catches everything. */
final def allCatch[T]: Catch[T] = new Catch(allCatcher[T]) withDesc "<everything>"
- /** A `Catch` object witch catches non-fatal exceptions. */
+ /** A `Catch` object which catches non-fatal exceptions. */
final def nonFatalCatch[T]: Catch[T] = new Catch(nonFatalCatcher[T]) withDesc "<non-fatal>"
/** Creates a `Catch` object which will catch any of the supplied exceptions.
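For context, a small sketch of `nonFatalCatch` next to `allCatch` (the parsed strings are illustrative):

  import scala.util.control.Exception._

  nonFatalCatch[Int] opt "42".toInt     // Some(42)
  nonFatalCatch[Int] opt "oops".toInt   // None: NumberFormatException is non-fatal and gets caught
  allCatch.either("7".toInt)            // Right(7); allCatch would also swallow fatal errors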
diff --git a/src/library/scala/util/hashing/MurmurHash3.scala b/src/library/scala/util/hashing/MurmurHash3.scala
index 1bfaeb255b..4e5537954f 100644
--- a/src/library/scala/util/hashing/MurmurHash3.scala
+++ b/src/library/scala/util/hashing/MurmurHash3.scala
@@ -191,7 +191,7 @@ private[hashing] class MurmurHash3 {
* This is based on the earlier MurmurHash3 code by Rex Kerr, but the
* MurmurHash3 algorithm was since changed by its creator Austin Appleby
* to remedy some weaknesses and improve performance. This represents the
- * latest and supposedly final version of the algortihm (revision 136).
+ * latest and supposedly final version of the algorithm (revision 136).
*
* @see [[http://code.google.com/p/smhasher]]
*/
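The public entry points of the algorithm described above, as a short sketch (inputs are arbitrary):

  import scala.util.hashing.MurmurHash3

  MurmurHash3.stringHash("scala")      // hash of a string
  MurmurHash3.seqHash(List(1, 2, 3))   // order-dependent hash for sequences
  MurmurHash3.setHash(Set(1, 2, 3))    // order-independent hash for sets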
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 5c4e706dc1..6d3d015b1a 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -477,7 +477,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
/**
* Replaces some of the matches using a replacer function that returns an [[scala.Option]].
* The replacer function takes a [[scala.util.matching.Regex.Match]] so that extra
- * information can be btained from the match. For example:
+ * information can be obtained from the match. For example:
*
* {{{
* import scala.util.matching.Regex._
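A compact sketch of `replaceSomeIn` in the spirit of the example this scaladoc introduces (pattern and input text are made up):

  import scala.util.matching.Regex

  val date = new Regex("""(\d{4})-(\d{2})-(\d{2})""", "year", "month", "day")
  val text = "due 2015-01-15, paid 2015-02-01"

  // Returning None from the replacer keeps that particular match unchanged.
  date.replaceSomeIn(text, m =>
    if (m.group("month") == "01") Some(s"Jan ${m.group("day")}") else None)
  // result: "due Jan 15, paid 2015-02-01"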
diff --git a/src/partest-extras/scala/tools/partest/ASMConverters.scala b/src/partest-extras/scala/tools/partest/ASMConverters.scala
index 67a4e8ae01..f6e2d2a9ec 100644
--- a/src/partest-extras/scala/tools/partest/ASMConverters.scala
+++ b/src/partest-extras/scala/tools/partest/ASMConverters.scala
@@ -89,7 +89,7 @@ object ASMConverters {
private def lst[T](xs: java.util.List[T]): List[T] = if (xs == null) Nil else xs.asScala.toList
- // Heterogenous List[Any] is used in FrameNode: type information about locals / stack values
+ // Heterogeneous List[Any] is used in FrameNode: type information about locals / stack values
// are stored in a List[Any] (Integer, String or LabelNode), see Javadoc of MethodNode#visitFrame.
// Opcodes (eg Opcodes.INTEGER) and Reference types (eg "java/lang/Object") are returned unchanged,
// LabelNodes are mapped to their LabelEntry.
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 756ed870ca..c86d08e925 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -936,7 +936,7 @@ trait Definitions extends api.StandardDefinitions {
// TODO these aren't final! They are now overridden in AnyRef/Object. Prior to the fix
// for SI-8129, they were actually *overloaded* by the members in AnyRef/Object.
// We should unfinalize these, override in AnyValClass, and make the overrides final.
- // Refchecks never actually looks at these, so its just for consistency.
+ // Refchecks never actually looks at these, so it's just for consistency.
lazy val Any_== = enterNewMethod(AnyClass, nme.EQ, AnyTpe :: Nil, BooleanTpe, FINAL)
lazy val Any_!= = enterNewMethod(AnyClass, nme.NE, AnyTpe :: Nil, BooleanTpe, FINAL)
diff --git a/src/reflect/scala/reflect/internal/ReificationSupport.scala b/src/reflect/scala/reflect/internal/ReificationSupport.scala
index c418321234..eddfec82e7 100644
--- a/src/reflect/scala/reflect/internal/ReificationSupport.scala
+++ b/src/reflect/scala/reflect/internal/ReificationSupport.scala
@@ -292,7 +292,7 @@ trait ReificationSupport { self: SymbolTable =>
if (ctorMods.isTrait)
result(ctorMods, Nil, edefs, body)
else {
- // undo conversion from (implicit ... ) to ()(implicit ... ) when its the only parameter section
+ // undo conversion from (implicit ... ) to ()(implicit ... ) when it's the only parameter section
val vparamssRestoredImplicits = ctorVparamss match {
case Nil :: (tail @ ((head :: _) :: _)) if head.mods.isImplicit => tail
case other => other
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 4a39712ad7..e9cbfd54eb 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -2061,7 +2061,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
final def outerClass: Symbol =
if (this == NoSymbol) {
- // ideally we shouldn't get here, but its better to harden against this than suffer the infinite loop in SI-9133
+ // ideally we shouldn't get here, but it's better to harden against this than suffer the infinite loop in SI-9133
devWarningDumpStack("NoSymbol.outerClass", 15)
NoSymbol
} else if (owner.isClass) owner
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index 75a1969d22..b2248ad518 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -365,7 +365,7 @@ abstract class TreeGen {
DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(lvdefs, Literal(Constant(()))))))
}
else {
- // convert (implicit ... ) to ()(implicit ... ) if its the only parameter section
+ // convert (implicit ... ) to ()(implicit ... ) if it's the only parameter section
if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
vparamss1 = List() :: vparamss1
val superCall = pendingSuperCall // we can't know in advance which of the parents will end up as a superclass
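Both this hunk and the ReificationSupport one above deal with the same normalization; at the source level it looks like this (a hedged sketch, `Box` and `label` are invented names):

  class Box(implicit val label: String)   // a lone implicit parameter section

  implicit val tag: String = "demo"

  // After parsing, the primary constructor behaves as ()(implicit label: String):
  new Box                  // implicit section inferred
  new Box()                // the inserted empty first section written out explicitly
  new Box()("explicit")    // implicit section supplied by hand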
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 86a53a1b02..f74d976b82 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -3658,7 +3658,7 @@ trait Types
// JZ: We used to register this as a perRunCache so it would be cleared eagerly at
// the end of the compilation run. But, that facility didn't actually clear this map (SI-8129)!
// When i fixed that bug, run/tpeCache-tyconCache.scala started failing. Why was that?
- // I've removed the registration for now. I don't think its particularly harmful anymore
+ // I've removed the registration for now. I don't think it's particularly harmful anymore
// as a) this is now a weak set, and b) it is discarded completely before the next run.
uniqueRunId = currentRunId
}
@@ -4535,7 +4535,7 @@ trait Types
/** Adds the @uncheckedBound annotation if the given `tp` has type arguments */
final def uncheckedBounds(tp: Type): Type = {
- if (tp.typeArgs.isEmpty || UncheckedBoundsClass == NoSymbol) tp // second condition for backwards compatibilty with older scala-reflect.jar
+ if (tp.typeArgs.isEmpty || UncheckedBoundsClass == NoSymbol) tp // second condition for backwards compatibility with older scala-reflect.jar
else tp.withAnnotation(AnnotationInfo marker UncheckedBoundsClass.tpe)
}
diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala
index 42b13944f6..83a5d23e7c 100644
--- a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala
@@ -155,7 +155,7 @@ trait FindMembers {
&& ( (member.owner eq other.owner) // same owner, therefore overload
|| (member.flags & PRIVATE) != 0 // (unqualified) private members never participate in overriding
|| (other.flags & PRIVATE) != 0 // ... as overrider or overridee.
- || !(memberTypeLow(member) matches memberTypeHi(other)) // do the member types match? If so, its an override. Otherwise it's an overload.
+ || !(memberTypeLow(member) matches memberTypeHi(other)) // do the member types match? If so, it's an override. Otherwise it's an overload.
)
)
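The corrected comment is the classic override-versus-overload distinction; in user code it plays out like this (class names are invented):

  trait Greeter {
    def greet(name: String): String
  }

  class Polite extends Greeter {
    // member types match the inherited greet -> this is an override
    override def greet(name: String): String = s"Hello, $name"

    // parameter type differs -> member types do not match, so this is an overload
    def greet(names: List[String]): String = names.map(n => greet(n)).mkString(", ")
  }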
diff --git a/src/scaladoc/scala/tools/nsc/doc/Index.scala b/src/scaladoc/scala/tools/nsc/doc/Index.scala
index 84545e9201..a11ca38a86 100644
--- a/src/scaladoc/scala/tools/nsc/doc/Index.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Index.scala
@@ -11,4 +11,6 @@ trait Index {
type SymbolMap = SortedMap[String, SortedSet[model.MemberEntity]]
def firstLetterIndex: Map[Char, SymbolMap]
+
+ def hasDeprecatedMembers: Boolean
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
index 61ab18d42d..8313d842e5 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -123,6 +123,8 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
new page.Index(universe, index) writeFor this
new page.IndexScript(universe, index) writeFor this
+ if (index.hasDeprecatedMembers)
+ new page.DeprecatedIndex(universe, index) writeFor this
try {
writeTemplates(_ writeFor this)
for (letter <- index.firstLetterIndex) {
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/DeprecatedIndex.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/DeprecatedIndex.scala
new file mode 100755
index 0000000000..f257153bd7
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/DeprecatedIndex.scala
@@ -0,0 +1,58 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ */
+
+package scala
+package tools
+package nsc
+package doc
+package html
+package page
+
+import doc.model._
+
+class DeprecatedIndex(universe: Universe, index: doc.Index) extends HtmlPage {
+
+ def path = List("deprecated-list.html")
+
+ def title = {
+ val s = universe.settings
+ ( if (!s.doctitle.isDefault) s.doctitle.value else "" ) +
+ ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" )
+ }
+
+ def headers =
+ <xml:group>
+ <link href={ relativeLinkTo(List("ref-index.css", "lib")) } media="screen" type="text/css" rel="stylesheet"/>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
+ </xml:group>
+
+
+ private def entry(name: String, methods: Iterable[MemberEntity]) = {
+ val occurrences = methods.filter(_.deprecation.isDefined).map(method =>
+ templateToHtml(method.inDefinitionTemplates.head)
+ ).toList.distinct
+
+ <div class="entry">
+ <div class="name">{ name }</div>
+ <div class="occurrences">{
+ for (owner <- occurrences) yield owner ++ scala.xml.Text(" ")
+ }</div>
+ </div>
+ }
+
+ def deprecatedEntries = {
+ val available = ('_' +: ('a' to 'z')).flatMap(index.firstLetterIndex.get)
+
+ for (group <- available;
+ value <- group if value._2.find(_.deprecation.isDefined).isDefined)
+ yield value
+ }
+
+ def body =
+ <body>{
+ for(value <- deprecatedEntries) yield
+ entry(value._1, value._2.view)
+ }</body>
+
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
index ce3a5eb1fc..6bfe480e33 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
@@ -61,12 +61,17 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
}
}
+ def deprecated: NodeSeq = if (index.hasDeprecatedMembers)
+ <a target="template" href="deprecated-list.html">deprecated</a>
+ else
+ <span>deprecated</span>
+
def browser =
<div id="browser" class="ui-layout-west">
<div class="ui-west-center">
<div id="filter">
<div id="textfilter"></div>
- <div id="letters">{ letters }</div>
+ <div id="letters">{ letters } &#8211; { deprecated }</div>
</div>
<div class="pack" id="tpl">{
def packageElem(pack: model.Package): NodeSeq = {
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
index 643a089aae..ad53dc6bfa 100755
--- a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
@@ -14,10 +14,12 @@ object IndexModelFactory {
def makeIndex(universe: Universe): Index = new Index {
- lazy val firstLetterIndex: Map[Char, SymbolMap] = {
+ lazy val (firstLetterIndex, hasDeprecatedMembers): (Map[Char, SymbolMap], Boolean) = {
object result extends mutable.HashMap[Char,SymbolMap] {
+ var deprecated = false
+
/* symbol name ordering */
implicit def orderingMap = math.Ordering.String
@@ -32,6 +34,8 @@ object IndexModelFactory {
val members = letter.get(d.name).getOrElse {
SortedSet.empty[MemberEntity](Ordering.by { _.toString })
} + d
+ if (!deprecated && members.find(_.deprecation.isDefined).isDefined)
+ deprecated = true
this(firstLetter) = letter + (d.name -> members)
}
}
@@ -50,7 +54,7 @@ object IndexModelFactory {
gather(universe.rootPackage)
- result.toMap
+ (result.toMap, result.deprecated)
}
}
}
diff --git a/test/files/jvm/inner.scala b/test/files/jvm/inner.scala
index c05e803449..dc01b124c5 100644
--- a/test/files/jvm/inner.scala
+++ b/test/files/jvm/inner.scala
@@ -77,7 +77,7 @@ object Scalatest {
def java(cname: String) =
exec(javacmd, "-cp", classpath, cname)
- /** Execute cmd, wait for the process to end and pipe it's output to stdout */
+ /** Execute cmd, wait for the process to end and pipe its output to stdout */
private def exec(args: String*) {
val proc = Runtime.getRuntime().exec(args.toArray)
val inp = new BufferedReader(new InputStreamReader(proc.getInputStream))
diff --git a/test/files/jvm/javaReflection/Test.scala b/test/files/jvm/javaReflection/Test.scala
index 5b6ef1b573..ae5a36eeb2 100644
--- a/test/files/jvm/javaReflection/Test.scala
+++ b/test/files/jvm/javaReflection/Test.scala
@@ -124,8 +124,8 @@ object Test {
// exclude files from Test.scala, just take those from Classes_1.scala
case s if !s.startsWith("Test") && s.endsWith(".class") => s.substring(0, s.length - 6)
}).sortWith((a, b) => {
- // sort such that first there are all anonymous funcitions, then all other classes.
- // within those cathegories, sort lexically.
+ // sort such that all anonymous functions come first, then all other classes.
+ // within those categories, sort lexically.
// this makes the check file smaller: it differs for anonymous functions between -Ydelambdafy:inline/method.
// the other classes are the same.
if (isAnonFunClassName(a)) !isAnonFunClassName(b) || a < b
diff --git a/test/files/pos/t7815.scala b/test/files/pos/t7815.scala
index 12a434c5b0..0a126f9faa 100644
--- a/test/files/pos/t7815.scala
+++ b/test/files/pos/t7815.scala
@@ -21,7 +21,7 @@ object Foo {
object Main extends App {
def mapWithFoo[A <: AnyRef, B](as: List[A])(implicit foo: Foo.Aux[A, B]) = {
// Should be Eta expandable because the result type of `f` is not
- // dependant on the value, it is just `B`.
+ // dependent on the value, it is just `B`.
as map foo.f
as map foo.g
as map foo.m
diff --git a/test/files/run/classfile-format-51.scala b/test/files/run/classfile-format-51.scala
index 24b1ee8397..7523130afa 100644
--- a/test/files/run/classfile-format-51.scala
+++ b/test/files/run/classfile-format-51.scala
@@ -12,7 +12,7 @@ import Opcodes._
// it runs a normal compile on the source in the 'code' field that refers to
// DynamicInvoker. Any failure will be dumped to std out.
//
-// By it's nature the test can only work on JDK 7+ because under JDK 6 some of the
+// By its nature the test can only work on JDK 7+ because under JDK 6 some of the
// classes referred to by DynamicInvoker won't be available and DynamicInvoker won't
// verify. So the test includes a version check that short-circuites the whole test
// on JDK 6
diff --git a/test/files/run/classfile-format-52.scala b/test/files/run/classfile-format-52.scala
index e12c84124c..453f61ac84 100644
--- a/test/files/run/classfile-format-52.scala
+++ b/test/files/run/classfile-format-52.scala
@@ -11,7 +11,7 @@ import Opcodes._
// HasDefaultMethod. Then it runs a normal compile on Scala source that extends that
// interface. Any failure will be dumped to std out.
//
-// By it's nature the test can only work on JDK 8+ because under JDK 7- the
+// By its nature the test can only work on JDK 8+ because under JDK 7- the
// interface won't verify.
object Test extends DirectTest {
override def extraSettings: String = "-optimise -usejavacp -d " + testOutput.path + " -cp " + testOutput.path
diff --git a/test/files/run/t7741a/Test.scala b/test/files/run/t7741a/Test.scala
index a75cb6c9eb..cdba1cccf8 100644
--- a/test/files/run/t7741a/Test.scala
+++ b/test/files/run/t7741a/Test.scala
@@ -25,7 +25,7 @@ object Test extends DirectTest {
// interface GroovyInterface {
//
// // This is the line that causes scalac to choke.
- // // It results in a GroovyInterface$1 class, which is a non-static inner class but it's constructor does not
+ // // It results in a GroovyInterface$1 class, which is a non-static inner class but its constructor does not
// // include the implicit parameter that is the immediate enclosing instance.
// // See http://jira.codehaus.org/browse/GROOVY-7312
// //
diff --git a/test/files/run/valueClassSelfType.scala b/test/files/run/valueClassSelfType.scala
new file mode 100644
index 0000000000..47a3764b0a
--- /dev/null
+++ b/test/files/run/valueClassSelfType.scala
@@ -0,0 +1,52 @@
+trait T
+
+class V1(val l: Long) extends AnyVal { self: T =>
+ def foo: V1 = self
+ def bar: T = self
+}
+
+class V2(val l: Long) extends AnyVal { self =>
+ def foo: V2 = self
+}
+
+class V3(val l: Long) extends AnyVal { self: Long =>
+ def foo: V3 = self
+ def bar: Long = self
+}
+
+// non-value classes
+
+class C1(val l: Long) { self: T =>
+ def foo: C1 = self
+ def bar: T = self
+}
+
+class C2(val l: Long) { self =>
+ def foo: C2 = self
+}
+
+class C3(val l: Long) { self: Long =>
+ def foo: C3 = self
+ def bar: Long = self
+}
+
+object Test extends App {
+ // Rejected: superclass V1 is not a subclass of the superclass Object of the mixin trait T
+ // new V1(1l) with T
+
+ assert(new V2(1l).foo.l == 1l)
+
+ // Rejected: V3 does not conform to its self-type V3 with Long
+ // new V3(1l)
+
+ val c2 = new C1(2l) with T
+ assert(c2.foo.l + c2.bar.asInstanceOf[C1].l == 4l)
+
+ assert(new C2(3l).foo.l == 3l)
+
+ // Rejected: C3 does not conform to its self-type C3 with Long
+ // new C3(4l)
+
+ // Rejected: class Long needs to be a trait to be mixed in
+ // new C3(4l) with Long
+}
diff --git a/test/junit/scala/collection/mutable/VectorTest.scala b/test/junit/scala/collection/mutable/VectorTest.scala
index e9c4d44a72..b3219d1b02 100644
--- a/test/junit/scala/collection/mutable/VectorTest.scala
+++ b/test/junit/scala/collection/mutable/VectorTest.scala
@@ -38,7 +38,6 @@ class VectorTest {
def iteratorCat() {
def its = vecs.map(_.toList.toIterator)
val cats = vecs.map(a => its.map(a ++ _))
- println(cats)
assert( cats == ans )
}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala
index 17724aecb1..0fc3601603 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala
@@ -720,7 +720,7 @@ class InlinerTest extends ClearAfterClass {
|final class D extends C
|object E extends C
|class T {
- | def t1(d: D) = d.f + d.g + E.f + E.g // d.f can be inlined because the reciever type is D, which is final.
+ | def t1(d: D) = d.f + d.g + E.f + E.g // d.f can be inlined because the receiver type is D, which is final.
|} // so d.f can be resolved statically. same for E.f
""".stripMargin
val List(c, d, e, eModule, t) = compile(code)
diff --git a/test/pending/jvm/javasigs.scala b/test/pending/jvm/javasigs.scala
index 8da59ab0a0..d18a4e6fb5 100644
--- a/test/pending/jvm/javasigs.scala
+++ b/test/pending/jvm/javasigs.scala
@@ -32,7 +32,7 @@ object Scalatest {
}
- /** Execute cmd, wait for the process to end and pipe it's output to stdout */
+ /** Execute cmd, wait for the process to end and pipe its output to stdout */
def exec(cmd: String) {
val proc = Runtime.getRuntime().exec(cmd)
val inp = new BufferedReader(new InputStreamReader(proc.getInputStream))
diff --git a/test/scaladoc/resources/SI-4476.scala b/test/scaladoc/resources/SI-4476.scala
new file mode 100644
index 0000000000..eb35ef45e7
--- /dev/null
+++ b/test/scaladoc/resources/SI-4476.scala
@@ -0,0 +1,9 @@
+package foo
+
+@deprecated("","")
+class A
+
+class B {
+ @deprecated("","")
+ def bar = 1
+}
diff --git a/test/scaladoc/resources/Trac4420.scala b/test/scaladoc/resources/Trac4420.scala
index dbe053f3da..d8e207876b 100644
--- a/test/scaladoc/resources/Trac4420.scala
+++ b/test/scaladoc/resources/Trac4420.scala
@@ -1,7 +1,7 @@
import java.io.File
/**
- * @define PP This class is an instance of XXX so it's members are not called directly.
+ * @define PP This class is an instance of XXX so its members are not called directly.
* Instead these classes are instantiated via a driver's ''process''. See YYY for more details. */
abstract class test
diff --git a/test/scaladoc/scalacheck/DeprecatedIndexTest.scala b/test/scaladoc/scalacheck/DeprecatedIndexTest.scala
new file mode 100644
index 0000000000..4a5a2001d4
--- /dev/null
+++ b/test/scaladoc/scalacheck/DeprecatedIndexTest.scala
@@ -0,0 +1,50 @@
+import org.scalacheck._
+import org.scalacheck.Prop._
+
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.html.page.DeprecatedIndex
+import java.net.{URLClassLoader, URLDecoder}
+
+object Test extends Properties("IndexScript") {
+
+ def getClasspath = {
+ // these things can be tricky
+ // this test previously relied on the assumption that the current thread's classloader is an url classloader and contains all the classpaths
+ // does partest actually guarantee this? to quote Leonard Nimoy: The answer, of course, is no.
+ // this test _will_ fail again some time in the future.
+ // Footnote: java.lang.ClassCastException: org.apache.tools.ant.loader.AntClassLoader5 cannot be cast to java.net.URLClassLoader
+ val loader = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader]
+ val paths = loader.getURLs.map(u => URLDecoder.decode(u.getPath))
+ paths mkString java.io.File.pathSeparator
+ }
+
+ val docFactory = {
+ val settings = new doc.Settings({Console.err.println(_)})
+ settings.scaladocQuietRun = true
+ settings.nowarn.value = true
+ settings.classpath.value = getClasspath
+ val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
+ new doc.DocFactory(reporter, settings)
+ }
+
+ val indexModelFactory = doc.model.IndexModelFactory
+
+ def createDeprecatedScript(path: String) =
+ docFactory.makeUniverse(Left(List(path))) match {
+ case Some(universe) => {
+ val index = new DeprecatedIndex(universe, indexModelFactory.makeIndex(universe))
+ Some(index)
+ }
+ case _ =>
+ None
+ }
+
+ property("deprecated-list page lists deprecated members") = {
+ createDeprecatedScript("test/scaladoc/resources/SI-4476.scala") match {
+ case Some(p) =>
+ p.deprecatedEntries.find(_._1 == "A").isDefined &&
+ p.deprecatedEntries.find(_._1 == "bar").isDefined
+ case None => false
+ }
+ }
+}
diff --git a/test/scaladoc/scalacheck/IndexTest.scala b/test/scaladoc/scalacheck/IndexTest.scala
index abc0e5da01..7dbd2103a6 100644
--- a/test/scaladoc/scalacheck/IndexTest.scala
+++ b/test/scaladoc/scalacheck/IndexTest.scala
@@ -71,7 +71,7 @@ object Test extends Properties("Index") {
case None => false
}
}
- property("browser contants a script element") = {
+ property("browser contains a script element") = {
createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
case Some(index) =>
(index.browser \ "script").size == 1
@@ -86,4 +86,10 @@ object Test extends Properties("Index") {
case None => false
}
}
+ property("index should report if there are deprecated members") = {
+ createIndex("test/scaladoc/resources/SI-4476.scala") match {
+ case Some(indexPage) => indexPage.index.hasDeprecatedMembers
+ case None => false
+ }
+ }
}
diff --git a/tools/scaladoc-compare b/tools/scaladoc-compare
index 74fbfd1dd4..46e1b75a19 100755
--- a/tools/scaladoc-compare
+++ b/tools/scaladoc-compare
@@ -7,7 +7,7 @@ if [ $# -ne 2 ]
then
echo
echo "scaladoc-compare will compare the scaladoc-generated pages in two different locations and output the diff"
- echo "it's main purpose is to track changes to scaladoc and prevent updates that break things."
+ echo "its main purpose is to track changes to scaladoc and prevent updates that break things."
echo
echo "This script is meant to be used with the scaladoc -raw-output option, as it compares .html.raw files "
echo "instead of markup-heavy .html files."