author     Josh Rosen <joshrosen@databricks.com>    2015-11-10 15:58:30 -0800
committer  Reynold Xin <rxin@databricks.com>        2015-11-10 15:58:30 -0800
commit     1dde39d796bbf42336051a86bedf871c7fddd513 (patch)
tree       a3a90cade110fe4b3c70039934b9a12d572ab8fa /project
parent     32790fe7249b0efe2cbc5c4ee2df0fb687dcd624 (diff)
[SPARK-9818] Re-enable Docker tests for JDBC data source
This patch re-enables tests for the Docker JDBC data source. These tests were reverted in #4872 due to transitive dependency conflicts introduced by the `docker-client` library. This patch should avoid those problems by using a version of `docker-client` which shades its transitive dependencies and by performing some build-magic to work around problems with that shaded JAR.

In addition, I significantly refactored the tests to simplify the setup and teardown code and to fix several Docker networking issues which caused problems when running in `boot2docker`.

Closes #8101.

Author: Josh Rosen <joshrosen@databricks.com>
Author: Yijie Shen <henry.yijieshen@gmail.com>

Closes #9503 from JoshRosen/docker-jdbc-tests.
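As a sketch of the shading approach the message describes, the sbt fragment below depends on a docker-client artifact whose transitive dependencies are already relocated. It is illustrative only: the coordinates, version, and "shaded" classifier are assumptions, not taken from this commit.

// Hypothetical sbt dependency (illustrative; not part of this patch): pulling in a
// shaded docker-client so its transitive dependencies cannot clash with Spark's own.
libraryDependencies += "com.spotify" % "docker-client" % "3.2.1" % "test" classifier "shaded"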
Diffstat (limited to 'project')
-rw-r--r--  project/SparkBuild.scala | 14
1 file changed, 12 insertions(+), 2 deletions(-)
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index a9fb741d75..b7c6192243 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -43,8 +43,9 @@ object BuildCommons {
"streaming-zeromq", "launcher", "unsafe", "test-tags").map(ProjectRef(buildLocation, _))
val optionallyEnabledProjects@Seq(yarn, yarnStable, java8Tests, sparkGangliaLgpl,
- streamingKinesisAsl) = Seq("yarn", "yarn-stable", "java8-tests", "ganglia-lgpl",
- "streaming-kinesis-asl").map(ProjectRef(buildLocation, _))
+ streamingKinesisAsl, dockerIntegrationTests) =
+ Seq("yarn", "yarn-stable", "java8-tests", "ganglia-lgpl", "streaming-kinesis-asl",
+ "docker-integration-tests").map(ProjectRef(buildLocation, _))
val assemblyProjects@Seq(assembly, examples, networkYarn, streamingFlumeAssembly, streamingKafkaAssembly, streamingMqttAssembly, streamingKinesisAslAssembly) =
Seq("assembly", "examples", "network-yarn", "streaming-flume-assembly", "streaming-kafka-assembly", "streaming-mqtt-assembly", "streaming-kinesis-asl-assembly")
@@ -240,6 +241,8 @@ object SparkBuild extends PomBuild {
enable(Flume.settings)(streamingFlumeSink)
+ enable(DockerIntegrationTests.settings)(dockerIntegrationTests)
+
/**
* Adds the ability to run the spark shell directly from SBT without building an assembly
@@ -291,6 +294,13 @@ object Flume {
lazy val settings = sbtavro.SbtAvro.avroSettings
}
+object DockerIntegrationTests {
+ // This serves to override the override specified in DependencyOverrides:
+ lazy val settings = Seq(
+ dependencyOverrides += "com.google.guava" % "guava" % "18.0"
+ )
+}
+
/**
* Overrides to work around sbt's dependency resolution being different from Maven's.
*/
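For context on the Guava pin added above: sbt's dependencyOverrides forces the version chosen during dependency resolution without declaring a direct dependency. The DependencyOverrides object that the new in-line comment refers to follows the same settings-object pattern used elsewhere in SparkBuild; a rough sketch, where the Guava version is an assumption and not taken from this diff:

// Assumed shape of the build-wide override object referenced by the comment in the
// patch; the Guava version here is illustrative, not taken from this commit.
object DependencyOverrides {
  lazy val settings = Seq(
    dependencyOverrides += "com.google.guava" % "guava" % "14.0.1"
  )
}

Because DockerIntegrationTests.settings is enabled only for the docker-integration-tests project, its Guava 18.0 entry applies there, while the rest of the build keeps the build-wide pin.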