-rwxr-xr-x  dev/run-tests.py          12
-rw-r--r--  pom.xml                    1
-rw-r--r--  project/SparkBuild.scala  14
3 files changed, 12 insertions, 15 deletions
diff --git a/dev/run-tests.py b/dev/run-tests.py
index 450b68123e..818a0c9f48 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -365,8 +365,16 @@ def build_spark_assembly_sbt(hadoop_version):
print("[info] Building Spark assembly (w/Hive 1.2.1) using SBT with these arguments: ",
" ".join(profiles_and_goals))
exec_sbt(profiles_and_goals)
- # Make sure that Java and Scala API documentation can be generated
- build_spark_unidoc_sbt(hadoop_version)
+
+ # Note that we skip the Unidoc build only if Hadoop 2.6 is explicitly set in this SBT build.
+ # For an unknown reason, dependency resolution differs between SBT and Unidoc, and the
+ # documentation build fails on a specific machine and environment in Jenkins that we were
+ # unable to reproduce. Please see SPARK-20343. This is a band-aid fix that should be removed
+ # in the future.
+ is_hadoop_version_2_6 = os.environ.get("AMPLAB_JENKINS_BUILD_PROFILE") == "hadoop2.6"
+ if not is_hadoop_version_2_6:
+ # Make sure that Java and Scala API documentation can be generated
+ build_spark_unidoc_sbt(hadoop_version)
def build_apache_spark(build_tool, hadoop_version):
diff --git a/pom.xml b/pom.xml
index 14370d92a9..c1174593c1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -142,7 +142,6 @@
<ivy.version>2.4.0</ivy.version>
<oro.version>2.0.8</oro.version>
<codahale.metrics.version>3.1.2</codahale.metrics.version>
- <!-- Keep consistent with Avro version in SBT build for SPARK-20343 -->
<avro.version>1.7.7</avro.version>
<avro.mapred.classifier>hadoop2</avro.mapred.classifier>
<jets3t.version>0.9.3</jets3t.version>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 77dae289f7..e52baf51ae 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -318,8 +318,8 @@ object SparkBuild extends PomBuild {
enable(MimaBuild.mimaSettings(sparkHome, x))(x)
}
- /* Generate and pick the spark build info from extra-resources and override a dependency */
- enable(Core.settings ++ CoreDependencyOverrides.settings)(core)
+ /* Generate and pick the spark build info from extra-resources */
+ enable(Core.settings)(core)
/* Unsafe settings */
enable(Unsafe.settings)(unsafe)
@@ -444,16 +444,6 @@ object DockerIntegrationTests {
}
/**
- * Overrides to work around sbt's dependency resolution being different from Maven's in Unidoc.
- *
- * Note that this is a hack that should be removed in the future. See SPARK-20343
- */
-object CoreDependencyOverrides {
- lazy val settings = Seq(
- dependencyOverrides += "org.apache.avro" % "avro" % "1.7.7")
-}
-
-/**
* Overrides to work around sbt's dependency resolution being different from Maven's.
*/
object DependencyOverrides {
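
For context, the removed CoreDependencyOverrides object relied on sbt's dependencyOverrides key, which forces the resolved version of a module regardless of what transitive dependencies request. The sketch below illustrates that mechanism in isolation; the object name ExampleOverrides is illustrative only, while the key and the Avro coordinates are the ones that appeared in the removed code.

// Minimal sketch of the sbt mechanism the removed object used (sbt build definition code).
// ExampleOverrides is a hypothetical name; "org.apache.avro" % "avro" % "1.7.7" is taken
// from the patch above.
import sbt._
import sbt.Keys._

object ExampleOverrides {
  // dependencyOverrides pins the version used during resolution even when a
  // transitive dependency asks for a different one.
  lazy val settings: Seq[Setting[_]] = Seq(
    dependencyOverrides += "org.apache.avro" % "avro" % "1.7.7")
}

Such a settings sequence would be attached to a project the same way this build enables other settings, mirroring the enable(...)(core) call the patch removes; with this commit, only the remaining DependencyOverrides object performs that kind of pinning.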