author     Dhruve Ashar <dhruveashar@gmail.com>    2016-06-06 09:42:50 -0700
committer  Marcelo Vanzin <vanzin@cloudera.com>    2016-06-06 09:42:50 -0700
commit     fa4bc8ea8bab1277d1482da370dac79947cac719 (patch)
tree       b71ec05ab5c47f58be5e0cc597d91dba31aa56cb
parent     00ad4f054cd044e17d29b7c2c62efd8616462619 (diff)
[SPARK-14279][BUILD] Pick the spark version from pom
## What changes were proposed in this pull request?

Change the way Spark picks up version information, and embed the build information to better identify the Spark version that is running. More context can be found here: https://github.com/apache/spark/pull/12152

## How was this patch tested?

Ran the mvn and sbt builds and verified that the version information was displayed correctly when executing `spark-submit --version`.

![image](https://cloud.githubusercontent.com/assets/7732317/15197251/f7c673a2-1795-11e6-8b2f-88f2a70cf1c1.png)

Author: Dhruve Ashar <dhruveashar@gmail.com>

Closes #13061 from dhruve/impr/SPARK-14279.
-rwxr-xr-x  build/spark-build-info                                          38
-rw-r--r--  core/pom.xml                                                    31
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala    7
-rw-r--r--  core/src/main/scala/org/apache/spark/package.scala              55
-rw-r--r--  pom.xml                                                          6
-rw-r--r--  project/SparkBuild.scala                                        21
6 files changed, 150 insertions(+), 8 deletions(-)
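For reference, here is a minimal sketch (not part of the patch) of how the constants introduced below could be consumed. `PrintBuildInfo` is a hypothetical object added for illustration; the `SPARK_*` names are the ones this change adds to the `org.apache.spark` package object.

```scala
// Hypothetical consumer (not part of this patch): after the change, the build
// metadata is exposed as constants on the org.apache.spark package object.
import org.apache.spark.{SPARK_BRANCH, SPARK_BUILD_DATE, SPARK_BUILD_USER,
  SPARK_REPO_URL, SPARK_REVISION, SPARK_VERSION}

object PrintBuildInfo {
  def main(args: Array[String]): Unit = {
    // Mirrors the extra lines spark-submit --version prints with this patch.
    println(s"version $SPARK_VERSION")
    println(s"Branch $SPARK_BRANCH")
    println(s"Compiled by user $SPARK_BUILD_USER on $SPARK_BUILD_DATE")
    println(s"Revision $SPARK_REVISION")
    println(s"Url $SPARK_REPO_URL")
  }
}
```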
diff --git a/build/spark-build-info b/build/spark-build-info
new file mode 100755
index 0000000000..ad0ec67f45
--- /dev/null
+++ b/build/spark-build-info
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This script generates the build info for spark and places it into the spark-version-info.properties file.
+# Arguments:
+# build_tgt_directory - The target directory where properties file would be created. [./core/target/extra-resources]
+# spark_version - The current version of spark
+
+RESOURCE_DIR="$1"
+mkdir -p "$RESOURCE_DIR"
+SPARK_BUILD_INFO="${RESOURCE_DIR}"/spark-version-info.properties
+
+echo_build_properties() {
+ echo version=$1
+ echo user=$USER
+ echo revision=$(git rev-parse HEAD)
+ echo branch=$(git rev-parse --abbrev-ref HEAD)
+ echo date=$(date -u +%Y-%m-%dT%H:%M:%SZ)
+ echo url=$(git config --get remote.origin.url)
+}
+
+echo_build_properties $2 > "$SPARK_BUILD_INFO"
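As a rough sketch (not part of the patch), the file the script writes can be inspected like this. The path assumes the Maven default `core/target/extra-resources` used below, and the key names are the ones written by `echo_build_properties`; `ReadBuildInfo` is a hypothetical helper for illustration only.

```scala
import java.io.FileInputStream
import java.util.Properties

import scala.collection.JavaConverters._

// Minimal sketch: load the generated properties file and print its contents.
object ReadBuildInfo {
  def main(args: Array[String]): Unit = {
    // Default location used by the Maven build in this patch; the sbt build
    // writes to core/target/extra-resources as well (see SparkBuild.scala).
    val in = new FileInputStream("core/target/extra-resources/spark-version-info.properties")
    try {
      val props = new Properties()
      props.load(in)
      // Keys written by echo_build_properties: version, user, revision, branch, date, url
      for (key <- props.stringPropertyNames().asScala.toSeq.sorted) {
        println(s"$key=${props.getProperty(key)}")
      }
    } finally {
      in.close()
    }
  }
}
```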
diff --git a/core/pom.xml b/core/pom.xml
index 45f8bfcc05..f5fdb40696 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -337,9 +337,40 @@
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
<testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
+ <resources>
+ <resource>
+ <directory>${project.basedir}/src/main/resources</directory>
+ </resource>
+ <resource>
+ <!-- Include the properties file to provide the build information. -->
+ <directory>${project.build.directory}/extra-resources</directory>
+ <filtering>true</filtering>
+ </resource>
+ </resources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <phase>generate-resources</phase>
+ <configuration>
+ <!-- Execute the shell script to generate the spark build information. -->
+ <tasks>
+ <exec executable="${project.basedir}/../build/spark-build-info">
+ <arg value="${project.build.directory}/extra-resources"/>
+ <arg value="${pom.version}"/>
+ </exec>
+ </tasks>
+ </configuration>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<!-- When using SPARK_PREPEND_CLASSES Spark classes compiled locally don't use
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 9be4cadcb4..9feafc99ac 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -40,7 +40,8 @@ import org.apache.ivy.plugins.matcher.GlobPatternMatcher
import org.apache.ivy.plugins.repository.file.FileRepository
import org.apache.ivy.plugins.resolver.{ChainResolver, FileSystemResolver, IBiblioResolver}
-import org.apache.spark.{SPARK_VERSION, SparkException, SparkUserAppException}
+import org.apache.spark.{SPARK_REVISION, SPARK_VERSION, SparkException, SparkUserAppException}
+import org.apache.spark.{SPARK_BRANCH, SPARK_BUILD_DATE, SPARK_BUILD_USER, SPARK_REPO_URL}
import org.apache.spark.api.r.RUtils
import org.apache.spark.deploy.rest._
import org.apache.spark.launcher.SparkLauncher
@@ -103,6 +104,10 @@ object SparkSubmit {
/___/ .__/\_,_/_/ /_/\_\ version %s
/_/
""".format(SPARK_VERSION))
+ printStream.println("Branch %s".format(SPARK_BRANCH))
+ printStream.println("Compiled by user %s on %s".format(SPARK_BUILD_USER, SPARK_BUILD_DATE))
+ printStream.println("Revision %s".format(SPARK_REVISION))
+ printStream.println("Url %s".format(SPARK_REPO_URL))
printStream.println("Type --help for more information.")
exitFn(0)
}
diff --git a/core/src/main/scala/org/apache/spark/package.scala b/core/src/main/scala/org/apache/spark/package.scala
index cc5e7ef3ae..2610d6f6e4 100644
--- a/core/src/main/scala/org/apache/spark/package.scala
+++ b/core/src/main/scala/org/apache/spark/package.scala
@@ -41,7 +41,58 @@ package org.apache
* level interfaces. These are subject to changes or removal in minor releases.
*/
+import java.util.Properties
+
package object spark {
- // For package docs only
- val SPARK_VERSION = "2.0.0-SNAPSHOT"
+
+ private object SparkBuildInfo {
+
+ val (
+ spark_version: String,
+ spark_branch: String,
+ spark_revision: String,
+ spark_build_user: String,
+ spark_repo_url: String,
+ spark_build_date: String) = {
+
+ val resourceStream = Thread.currentThread().getContextClassLoader.
+ getResourceAsStream("spark-version-info.properties")
+
+ try {
+ val unknownProp = "<unknown>"
+ val props = new Properties()
+ props.load(resourceStream)
+ (
+ props.getProperty("version", unknownProp),
+ props.getProperty("branch", unknownProp),
+ props.getProperty("revision", unknownProp),
+ props.getProperty("user", unknownProp),
+ props.getProperty("url", unknownProp),
+ props.getProperty("date", unknownProp)
+ )
+ } catch {
+ case npe: NullPointerException =>
+ throw new SparkException("Error while locating file spark-version-info.properties", npe)
+ case e: Exception =>
+ throw new SparkException("Error loading properties from spark-version-info.properties", e)
+ } finally {
+ if (resourceStream != null) {
+ try {
+ resourceStream.close()
+ } catch {
+ case e: Exception =>
+ throw new SparkException("Error closing spark build info resource stream", e)
+ }
+ }
+ }
+ }
+ }
+
+ val SPARK_VERSION = SparkBuildInfo.spark_version
+ val SPARK_BRANCH = SparkBuildInfo.spark_branch
+ val SPARK_REVISION = SparkBuildInfo.spark_revision
+ val SPARK_BUILD_USER = SparkBuildInfo.spark_build_user
+ val SPARK_REPO_URL = SparkBuildInfo.spark_repo_url
+ val SPARK_BUILD_DATE = SparkBuildInfo.spark_build_date
}
+
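A hypothetical test-style sketch (not part of the patch) of the resulting behavior: the constants are now populated from `spark-version-info.properties` on the classpath, falling back to `<unknown>` for keys missing from the file and throwing a `SparkException` if the file itself cannot be located. The suite name is illustrative only.

```scala
import org.apache.spark.{SPARK_REVISION, SPARK_VERSION}
import org.scalatest.FunSuite

// Illustrative only: checks that the package-object constants were loaded
// from spark-version-info.properties rather than a hard-coded literal.
class SparkBuildInfoSuite extends FunSuite {
  test("build info constants are populated") {
    assert(SPARK_VERSION.nonEmpty)
    // "<unknown>" is the fallback value used for keys missing from the file.
    assert(SPARK_REVISION.nonEmpty)
  }
}
```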
diff --git a/pom.xml b/pom.xml
index 60c8c8dc7a..6c67452adf 100644
--- a/pom.xml
+++ b/pom.xml
@@ -180,6 +180,8 @@
<antlr4.version>4.5.3</antlr4.version>
<jpam.version>1.1</jpam.version>
<selenium.version>2.52.0</selenium.version>
+ <paranamer.version>2.8</paranamer.version>
+ <maven-antrun.version>1.8</maven-antrun.version>
<test.java.home>${java.home}</test.java.home>
<test.exclude.tags></test.exclude.tags>
@@ -2061,7 +2063,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
- <version>1.8</version>
+ <version>${maven-antrun.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
@@ -2184,7 +2186,7 @@
<pluginExecutionFilter>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
- <versionRange>[1.8,)</versionRange>
+ <versionRange>[${maven-antrun.version},)</versionRange>
<goals>
<goal>run</goal>
</goals>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 744f57c517..304288a32c 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -360,6 +360,9 @@ object SparkBuild extends PomBuild {
enable(MimaBuild.mimaSettings(sparkHome, x))(x)
}
+ /* Generate and pick the spark build info from extra-resources */
+ enable(Core.settings)(core)
+
/* Unsafe settings */
enable(Unsafe.settings)(unsafe)
@@ -448,7 +451,19 @@ object SparkBuild extends PomBuild {
else x.settings(Seq[Setting[_]](): _*)
} ++ Seq[Project](OldDeps.project)
}
+}
+object Core {
+ lazy val settings = Seq(
+ resourceGenerators in Compile += Def.task {
+ val buildScript = baseDirectory.value + "/../build/spark-build-info"
+ val targetDir = baseDirectory.value + "/target/extra-resources/"
+ val command = buildScript + " " + targetDir + " " + version.value
+ Process(command).!!
+ val propsFile = baseDirectory.value / "target" / "extra-resources" / "spark-version-info.properties"
+ Seq(propsFile)
+ }.taskValue
+ )
}
object Unsafe {
@@ -480,9 +495,9 @@ object DependencyOverrides {
}
/**
- This excludes library dependencies in sbt, which are specified in maven but are
- not needed by sbt build.
- */
+ * This excludes library dependencies in sbt, which are specified in maven but are
+ * not needed by sbt build.
+ */
object ExcludedDependencies {
lazy val settings = Seq(
libraryDependencies ~= { libs => libs.filterNot(_.name == "groovy-all") }