author    Weiqing Yang <yangweiqing001@gmail.com>    2016-12-03 10:36:19 +0000
committer Sean Owen <sowen@cloudera.com>             2016-12-03 10:36:19 +0000
commit    576197320cba515c41ec2fc4139b94a50206b29f (patch)
tree      5da5848743ef12f7a15035ca5cbb07045cabd75a /project
parent    d1312fb7edffd6e10c86f69ddfff05f8915856ac (diff)
[SPARK-18638][BUILD] Upgrade sbt, Zinc, and Maven plugins
## What changes were proposed in this pull request?

This PR is to upgrade:

```
sbt: 0.13.11 -> 0.13.13
zinc: 0.3.9 -> 0.3.11
maven-assembly-plugin: 2.6 -> 3.0.0
maven-compiler-plugin: 3.5.1 -> 3.6
maven-jar-plugin: 2.6 -> 3.0.2
maven-javadoc-plugin: 2.10.3 -> 2.10.4
maven-source-plugin: 2.4 -> 3.0.1
org.codehaus.mojo:build-helper-maven-plugin: 1.10 -> 1.12
org.codehaus.mojo:exec-maven-plugin: 1.4.0 -> 1.5.0
```

The sbt release notes since the last version we used are [v0.13.12](https://github.com/sbt/sbt/releases/tag/v0.13.12) and [v0.13.13](https://github.com/sbt/sbt/releases/tag/v0.13.13).

## How was this patch tested?

Passes the build and the existing tests.

Author: Weiqing Yang <yangweiqing001@gmail.com>

Closes #16069 from weiqingy/SPARK-18638.
Diffstat (limited to 'project')
-rw-r--r--  project/MimaBuild.scala   |  6
-rw-r--r--  project/SparkBuild.scala  | 51
-rw-r--r--  project/build.properties  |  2
3 files changed, 30 insertions, 29 deletions
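Most of the SparkBuild.scala changes below exist because sbt 0.13.13 deprecates the `<<=`, `<+=`, and `<++=` operators and the tuple-`map` style of wiring keys; the affected settings are rewritten in the `:=`/`.value` macro style. A minimal sketch of that migration pattern, using a hypothetical key in a plain sbt 0.13 build definition rather than anything from this PR:

```scala
// Sketch of the <<= -> := migration that the SparkBuild.scala hunks below apply,
// shown with a hypothetical task key (jarLabel) in a plain sbt 0.13 build.sbt.
val jarLabel = taskKey[String]("Label used for the packaged jar")  // hypothetical key

// Old style, deprecated in sbt 0.13.13: wire the task via <<= and a tuple map.
// jarLabel <<= (moduleName, version) map { (m, v) => s"$m-$v" }

// New style: assign with := and read other keys through the .value macro.
jarLabel := s"${moduleName.value}-${version.value}"
```

The same rewrite is applied below to `otherResolvers`, `publishLocalConfiguration`, `jarName in assembly`, and the resource generators.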
diff --git a/project/MimaBuild.scala b/project/MimaBuild.scala
index 77397eab81..de0655b6cb 100644
--- a/project/MimaBuild.scala
+++ b/project/MimaBuild.scala
@@ -22,7 +22,7 @@ import com.typesafe.tools.mima.core._
import com.typesafe.tools.mima.core.MissingClassProblem
import com.typesafe.tools.mima.core.MissingTypesProblem
import com.typesafe.tools.mima.core.ProblemFilters._
-import com.typesafe.tools.mima.plugin.MimaKeys.{binaryIssueFilters, previousArtifact}
+import com.typesafe.tools.mima.plugin.MimaKeys.{mimaBinaryIssueFilters, mimaPreviousArtifacts}
import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
@@ -92,8 +92,8 @@ object MimaBuild {
val project = projectRef.project
val fullId = "spark-" + project + "_2.11"
mimaDefaultSettings ++
- Seq(previousArtifact := Some(organization % fullId % previousSparkVersion),
- binaryIssueFilters ++= ignoredABIProblems(sparkHome, version.value))
+ Seq(mimaPreviousArtifacts := Set(organization % fullId % previousSparkVersion),
+ mimaBinaryIssueFilters ++= ignoredABIProblems(sparkHome, version.value))
}
}
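The MimaBuild.scala change above tracks the sbt-mima-plugin key renames: `previousArtifact` (an `Option[ModuleID]`) becomes `mimaPreviousArtifacts` (a `Set[ModuleID]`), and `binaryIssueFilters` becomes `mimaBinaryIssueFilters`. A minimal sketch of the renamed keys for a hypothetical module, not Spark's actual configuration:

```scala
// Hypothetical build.sbt fragment using the renamed MiMa keys; assumes sbt-mima-plugin
// (with its default settings) is declared in project/plugins.sbt.
import com.typesafe.tools.mima.core.MissingClassProblem
import com.typesafe.tools.mima.core.ProblemFilters
import com.typesafe.tools.mima.plugin.MimaKeys.{mimaBinaryIssueFilters, mimaPreviousArtifacts}

// A Set of artifacts to compare against, replacing the old Option-valued previousArtifact.
mimaPreviousArtifacts := Set("org.example" %% "example-core" % "1.0.0")

// Exclusions accumulate on mimaBinaryIssueFilters, replacing the old binaryIssueFilters key.
mimaBinaryIssueFilters += ProblemFilters.exclude[MissingClassProblem]("org.example.internal.Helper")
```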
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index e3fbe0379f..fdc33c77fe 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -251,13 +251,12 @@ object SparkBuild extends PomBuild {
Resolver.file("local", file(Path.userHome.absolutePath + "/.ivy2/local"))(Resolver.ivyStylePatterns)
),
externalResolvers := resolvers.value,
- otherResolvers <<= SbtPomKeys.mvnLocalRepository(dotM2 => Seq(Resolver.file("dotM2", dotM2))),
- publishLocalConfiguration in MavenCompile <<= (packagedArtifacts, deliverLocal, ivyLoggingLevel) map {
- (arts, _, level) => new PublishConfiguration(None, "dotM2", arts, Seq(), level)
- },
+ otherResolvers := SbtPomKeys.mvnLocalRepository(dotM2 => Seq(Resolver.file("dotM2", dotM2))).value,
+ publishLocalConfiguration in MavenCompile :=
+ new PublishConfiguration(None, "dotM2", packagedArtifacts.value, Seq(), ivyLoggingLevel.value),
publishMavenStyle in MavenCompile := true,
- publishLocal in MavenCompile <<= publishTask(publishLocalConfiguration in MavenCompile, deliverLocal),
- publishLocalBoth <<= Seq(publishLocal in MavenCompile, publishLocal).dependOn,
+ publishLocal in MavenCompile := publishTask(publishLocalConfiguration in MavenCompile, deliverLocal).value,
+ publishLocalBoth := Seq(publishLocal in MavenCompile, publishLocal).dependOn.value,
javacOptions in (Compile, doc) ++= {
val versionParts = System.getProperty("java.version").split("[+.\\-]+", 3)
@@ -431,7 +430,8 @@ object SparkBuild extends PomBuild {
val packages :: className :: otherArgs = spaceDelimited("<group:artifact:version> <MainClass> [args]").parsed.toList
val scalaRun = (runner in run).value
val classpath = (fullClasspath in Runtime).value
- val args = Seq("--packages", packages, "--class", className, (Keys.`package` in Compile in "core").value.getCanonicalPath) ++ otherArgs
+ val args = Seq("--packages", packages, "--class", className, (Keys.`package` in Compile in LocalProject("core"))
+ .value.getCanonicalPath) ++ otherArgs
println(args)
scalaRun.run("org.apache.spark.deploy.SparkSubmit", classpath.map(_.data), args, streams.value.log)
},
@@ -443,7 +443,7 @@ object SparkBuild extends PomBuild {
}
))(assembly)
- enable(Seq(sparkShell := sparkShell in "assembly"))(spark)
+ enable(Seq(sparkShell := sparkShell in LocalProject("assembly")))(spark)
// TODO: move this to its upstream project.
override def projectDefinitions(baseDirectory: File): Seq[Project] = {
@@ -512,9 +512,9 @@ object OldDeps {
lazy val project = Project("oldDeps", file("dev"), settings = oldDepsSettings)
- lazy val allPreviousArtifactKeys = Def.settingDyn[Seq[Option[ModuleID]]] {
+ lazy val allPreviousArtifactKeys = Def.settingDyn[Seq[Set[ModuleID]]] {
SparkBuild.mimaProjects
- .map { project => MimaKeys.previousArtifact in project }
+ .map { project => MimaKeys.mimaPreviousArtifacts in project }
.map(k => Def.setting(k.value))
.join
}
@@ -568,9 +568,9 @@ object Hive {
javaOptions in Test := (javaOptions in Test).value.filterNot(_ == "-ea"),
// Supporting all SerDes requires us to depend on deprecated APIs, so we turn off the warnings
// only for this subproject.
- scalacOptions <<= scalacOptions map { currentOpts: Seq[String] =>
+ scalacOptions := (scalacOptions map { currentOpts: Seq[String] =>
currentOpts.filterNot(_ == "-deprecation")
- },
+ }).value,
initialCommands in console :=
"""
|import org.apache.spark.SparkContext
@@ -608,17 +608,18 @@ object Assembly {
sys.props.get("hadoop.version")
.getOrElse(SbtPomKeys.effectivePom.value.getProperties.get("hadoop.version").asInstanceOf[String])
},
- jarName in assembly <<= (version, moduleName, hadoopVersion) map { (v, mName, hv) =>
- if (mName.contains("streaming-flume-assembly") || mName.contains("streaming-kafka-0-8-assembly") || mName.contains("streaming-kafka-0-10-assembly") || mName.contains("streaming-kinesis-asl-assembly")) {
+ jarName in assembly := {
+ if (moduleName.value.contains("streaming-flume-assembly")
+ || moduleName.value.contains("streaming-kafka-0-8-assembly")
+ || moduleName.value.contains("streaming-kafka-0-10-assembly")
+ || moduleName.value.contains("streaming-kinesis-asl-assembly")) {
// This must match the same name used in maven (see external/kafka-0-8-assembly/pom.xml)
- s"${mName}-${v}.jar"
+ s"${moduleName.value}-${version.value}.jar"
} else {
- s"${mName}-${v}-hadoop${hv}.jar"
+ s"${moduleName.value}-${version.value}-hadoop${hadoopVersion.value}.jar"
}
},
- jarName in (Test, assembly) <<= (version, moduleName, hadoopVersion) map { (v, mName, hv) =>
- s"${mName}-test-${v}.jar"
- },
+ jarName in (Test, assembly) := s"${moduleName.value}-test-${version.value}.jar",
mergeStrategy in assembly := {
case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard
case m if m.toLowerCase.matches("meta-inf.*\\.sf$") => MergeStrategy.discard
@@ -639,13 +640,13 @@ object PySparkAssembly {
// Use a resource generator to copy all .py files from python/pyspark into a managed directory
// to be included in the assembly. We can't just add "python/" to the assembly's resource dir
// list since that will copy unneeded / unwanted files.
- resourceGenerators in Compile <+= resourceManaged in Compile map { outDir: File =>
+ resourceGenerators in Compile += Def.macroValueI(resourceManaged in Compile map { outDir: File =>
val src = new File(BuildCommons.sparkHome, "python/pyspark")
val zipFile = new File(BuildCommons.sparkHome , "python/lib/pyspark.zip")
zipFile.delete()
zipRecursive(src, zipFile)
Seq[File]()
- }
+ }).value
)
private def zipRecursive(source: File, destZipFile: File) = {
@@ -771,7 +772,7 @@ object Unidoc {
object CopyDependencies {
val copyDeps = TaskKey[Unit]("copyDeps", "Copies needed dependencies to the build directory.")
- val destPath = (crossTarget in Compile) / "jars"
+ val destPath = (crossTarget in Compile) { _ / "jars"}
lazy val settings = Seq(
copyDeps := {
@@ -791,7 +792,7 @@ object CopyDependencies {
}
},
crossTarget in (Compile, packageBin) := destPath.value,
- packageBin in Compile <<= (packageBin in Compile).dependsOn(copyDeps)
+ packageBin in Compile := (packageBin in Compile).dependsOn(copyDeps).value
)
}
@@ -862,7 +863,7 @@ object TestSettings {
// Only allow one test at a time, even across projects, since they run in the same JVM
parallelExecution in Test := false,
// Make sure the test temp directory exists.
- resourceGenerators in Test <+= resourceManaged in Test map { outDir: File =>
+ resourceGenerators in Test += Def.macroValueI(resourceManaged in Test map { outDir: File =>
var dir = new File(testTempDir)
if (!dir.isDirectory()) {
// Because File.mkdirs() can fail if multiple callers are trying to create the same
@@ -880,7 +881,7 @@ object TestSettings {
}
}
Seq[File]()
- },
+ }).value,
concurrentRestrictions in Global += Tags.limit(Tags.Test, 1),
// Remove certain packages from Scaladoc
scalacOptions in (Compile, doc) := Seq(
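The CopyDependencies hunk earlier in this file also shows the task-dependency form that replaces `<<= ... dependsOn`: the task is reassigned with `:=` and the prerequisite is chained through `.dependsOn(...).value`. A small sketch of the same pattern with a hypothetical task:

```scala
// Hypothetical sbt 0.13 fragment mirroring the packageBin/copyDeps wiring above.
val copyAssets = taskKey[Unit]("Copies static assets next to the packaged jar")  // hypothetical task

copyAssets := {
  // Placeholder work; Spark's real copyDeps task copies runtime dependency jars.
  streams.value.log.info("copying assets")
}

// Old style: packageBin in Compile <<= (packageBin in Compile).dependsOn(copyAssets)
// New style: reassign with := and pull the chained task in with .value.
packageBin in Compile := (packageBin in Compile).dependsOn(copyAssets).value
```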
diff --git a/project/build.properties b/project/build.properties
index 1e38156e0b..d339865ab9 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -14,4 +14,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-sbt.version=0.13.11
+sbt.version=0.13.13