-rw-r--r--  docs/building-spark.md | 12
-rw-r--r--  external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala | 3
-rw-r--r--  external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala | 5
-rw-r--r--  external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala | 3
-rw-r--r--  pom.xml | 8
-rw-r--r--  project/SparkBuild.scala | 3
6 files changed, 22 insertions, 12 deletions
diff --git a/docs/building-spark.md b/docs/building-spark.md
index fec442af95..13c95e4fcb 100644
--- a/docs/building-spark.md
+++ b/docs/building-spark.md
@@ -190,6 +190,18 @@ or
Java 8 tests are automatically enabled when a Java 8 JDK is detected.
If you have JDK 8 installed but it is not the system default, you can set JAVA_HOME to point to JDK 8 before running the tests.
+# Running Docker-based Integration Test Suites
+
+To run only the Docker-based integration tests and nothing else:
+
+    mvn install -DskipTests
+    mvn -Pdocker-integration-tests -pl :spark-docker-integration-tests_2.11 test
+
+or
+
+    sbt docker-integration-tests/test
+
+
# Packaging without Hadoop Dependencies for YARN
The assembly directory produced by `mvn package` will, by default, include all of Spark's dependencies, including Hadoop and some of its ecosystem projects. On YARN deployments, this causes multiple versions of these to appear on executor classpaths: the version packaged in the Spark assembly and the version on each node, included with `yarn.application.classpath`. The `hadoop-provided` profile builds the assembly without including Hadoop-ecosystem projects, like ZooKeeper and Hadoop itself.
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
index aa47228eff..a70ed98b52 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
@@ -21,12 +21,9 @@ import java.math.BigDecimal
import java.sql.{Connection, Date, Timestamp}
import java.util.Properties
-import org.scalatest.Ignore
-
import org.apache.spark.tags.DockerTest
@DockerTest
-@Ignore
class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
override val db = new DatabaseOnDocker {
override val imageName = "mysql:5.7.9"
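
With @Ignore removed, each of these suites is driven by a DatabaseOnDocker definition like the one truncated above. The following is a minimal sketch of that pattern based only on the members visible in this diff (imageName, env); the jdbcPort, getJdbcUrl and dataPreparation members are assumptions about what such a suite plausibly needs, not copied from the Spark source:

    package org.apache.spark.sql.jdbc

    import java.sql.Connection

    import org.apache.spark.tags.DockerTest

    @DockerTest
    class ExampleJDBCIntegrationSuite extends DockerJDBCIntegrationSuite {
      override val db = new DatabaseOnDocker {
        override val imageName = "mysql:5.7.9"                       // Docker image to pull and start
        override val env = Map("MYSQL_ROOT_PASSWORD" -> "rootpass")  // environment passed to the container
        override val jdbcPort = 3306                                 // assumption: port the JDBC driver targets
        override def getJdbcUrl(ip: String, port: Int): String =     // assumption: URL built from the mapped port
          s"jdbc:mysql://$ip:$port/mysql?user=root&password=rootpass"
      }

      // assumption: hook used to create and populate tables before the tests run
      override def dataPreparation(conn: Connection): Unit = {
        conn.prepareStatement("CREATE TABLE tbl (x INTEGER)").executeUpdate()
      }
    }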
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
index 357866b87c..c5e1f8607b 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
@@ -20,8 +20,6 @@ package org.apache.spark.sql.jdbc
import java.sql.Connection
import java.util.Properties
-import org.scalatest.Ignore
-
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.tags.DockerTest
@@ -46,12 +44,11 @@ import org.apache.spark.tags.DockerTest
* repository.
*/
@DockerTest
-@Ignore
class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLContext {
import testImplicits._
override val db = new DatabaseOnDocker {
- override val imageName = "wnameless/oracle-xe-11g:latest"
+ override val imageName = "wnameless/oracle-xe-11g:14.04.4"
override val env = Map(
"ORACLE_ROOT_PASSWORD" -> "oracle"
)
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
index 6546d4cfd7..79dd70116e 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
@@ -20,15 +20,12 @@ package org.apache.spark.sql.jdbc
import java.sql.Connection
import java.util.Properties
-import org.scalatest.Ignore
-
import org.apache.spark.sql.Column
import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.types.{ArrayType, DecimalType}
import org.apache.spark.tags.DockerTest
@DockerTest
-@Ignore
class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
override val db = new DatabaseOnDocker {
override val imageName = "postgres:9.4.5"
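
The Column, Literal, ArrayType and DecimalType imports kept above hint at what this suite checks once its container is running: that Postgres column types survive the JDBC round trip into Catalyst types. A sketch of that kind of assertion, assuming the jdbcUrl helper and shared sqlContext provided by the base suite; the table name and column contents here are hypothetical:

    import java.util.Properties

    import org.apache.spark.sql.types.{ArrayType, DecimalType}

    test("Postgres types are mapped to Catalyst types") {
      // jdbcUrl is assumed to come from DockerJDBCIntegrationSuite; "bar" is a hypothetical test table.
      val df = sqlContext.read.jdbc(jdbcUrl, "bar", new Properties)
      val types = df.schema.fields.map(_.dataType)
      assert(types.exists(_.isInstanceOf[DecimalType]))  // e.g. a numeric(10, 2) column
      assert(types.exists(_.isInstanceOf[ArrayType]))    // e.g. an integer[] column
    }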
diff --git a/pom.xml b/pom.xml
index 852136a998..3e783fa56e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -101,7 +101,6 @@
<module>sql/core</module>
<module>sql/hive</module>
<module>sql/hivecontext-compatibility</module>
- <module>external/docker-integration-tests</module>
<module>assembly</module>
<module>external/flume</module>
<module>external/flume-sink</module>
@@ -2469,6 +2468,13 @@
</build>
</profile>
+ <profile>
+ <id>docker-integration-tests</id>
+ <modules>
+ <module>external/docker-integration-tests</module>
+ </modules>
+ </profile>
+
<!-- A series of build profiles where customizations for particular Hadoop releases can be made -->
<!-- Hadoop-a.b.c dependencies can be found at
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 9249248c71..f50f41a88d 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -382,7 +382,8 @@ object SparkBuild extends PomBuild {
enable(Java8TestSettings.settings)(java8Tests)
- enable(DockerIntegrationTests.settings)(dockerIntegrationTests)
+ // SPARK-14738 - Remove docker tests from main Spark build
+ // enable(DockerIntegrationTests.settings)(dockerIntegrationTests)
/**
* Adds the ability to run the spark shell directly from SBT without building an assembly