From a03c5e68abd8c066c97ebd388883070d59dce1a7 Mon Sep 17 00:00:00 2001
From: Luciano Resende
Date: Fri, 6 May 2016 12:25:45 +0100
Subject: [SPARK-14738][BUILD] Separate docker integration tests from main build

## What changes were proposed in this pull request?

Create a Maven profile for executing the docker integration tests using Maven.
Remove the docker integration tests from the main sbt build.
Update the documentation on how to run the docker integration tests from sbt.

## How was this patch tested?

Manual test of the docker integration tests as in:

    mvn -Pdocker-integration-tests -pl :spark-docker-integration-tests_2.11 compile test

## Other comments

Note that the DB2 docker tests are still disabled: there is a kernel version issue
on the AMPLab Jenkins slaves, and we would need to get them to the right level
before enabling those tests. They do run OK locally with the updates from PR #12348.

Author: Luciano Resende

Closes #12508 from lresende/docker.
---
 docs/building-spark.md                                        | 12 ++++++++++++
 .../org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala     |  3 ---
 .../org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala    |  5 +----
 .../org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala  |  3 ---
 pom.xml                                                       |  8 +++++++-
 project/SparkBuild.scala                                      |  3 ++-
 6 files changed, 22 insertions(+), 12 deletions(-)

diff --git a/docs/building-spark.md b/docs/building-spark.md
index fec442af95..13c95e4fcb 100644
--- a/docs/building-spark.md
+++ b/docs/building-spark.md
@@ -190,6 +190,18 @@ or
 
 Java 8 tests are automatically enabled when a Java 8 JDK is detected.
 If you have JDK 8 installed but it is not the system default, you can set
 JAVA_HOME to point to JDK 8 before running the tests.
 
+# Running Docker based Integration Test Suites
+
+To run only the Docker-based integration tests and nothing else:
+
+    mvn install -DskipTests
+    mvn -Pdocker-integration-tests -pl :spark-docker-integration-tests_2.11 test
+
+or
+
+    sbt docker-integration-tests/test
+
+
 # Packaging without Hadoop Dependencies for YARN
 
 The assembly directory produced by `mvn package` will, by default, include all of
 Spark's dependencies, including Hadoop and some of its ecosystem projects. On YARN
 deployments, this causes multiple versions of these to appear on executor
 classpaths: the version packaged in the Spark assembly and the version on each
 node, included with `yarn.application.classpath`. The `hadoop-provided` profile
 builds the assembly without including Hadoop-ecosystem projects, like ZooKeeper
 and Hadoop itself.
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
index aa47228eff..a70ed98b52 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
@@ -21,12 +21,9 @@ import java.math.BigDecimal
 import java.sql.{Connection, Date, Timestamp}
 import java.util.Properties
 
-import org.scalatest.Ignore
-
 import org.apache.spark.tags.DockerTest
 
 @DockerTest
-@Ignore
 class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
   override val db = new DatabaseOnDocker {
     override val imageName = "mysql:5.7.9"
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
index 357866b87c..c5e1f8607b 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
@@ -20,8 +20,6 @@ package org.apache.spark.sql.jdbc
 import java.sql.Connection
 import java.util.Properties
 
-import org.scalatest.Ignore
-
 import org.apache.spark.sql.test.SharedSQLContext
 import org.apache.spark.tags.DockerTest
 
@@ -46,12 +44,11 @@ import org.apache.spark.tags.DockerTest
  * repository.
  */
 @DockerTest
-@Ignore
 class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLContext {
   import testImplicits._
 
   override val db = new DatabaseOnDocker {
-    override val imageName = "wnameless/oracle-xe-11g:latest"
+    override val imageName = "wnameless/oracle-xe-11g:14.04.4"
     override val env = Map(
       "ORACLE_ROOT_PASSWORD" -> "oracle"
     )
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
index 6546d4cfd7..79dd70116e 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
@@ -20,15 +20,12 @@ package org.apache.spark.sql.jdbc
 import java.sql.Connection
 import java.util.Properties
 
-import org.scalatest.Ignore
-
 import org.apache.spark.sql.Column
 import org.apache.spark.sql.catalyst.expressions.Literal
 import org.apache.spark.sql.types.{ArrayType, DecimalType}
 import org.apache.spark.tags.DockerTest
 
 @DockerTest
-@Ignore
 class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
   override val db = new DatabaseOnDocker {
     override val imageName = "postgres:9.4.5"
diff --git a/pom.xml b/pom.xml
index 852136a998..3e783fa56e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -101,7 +101,6 @@
     <module>sql/core</module>
     <module>sql/hive</module>
     <module>sql/hivecontext-compatibility</module>
-    <module>external/docker-integration-tests</module>
     <module>assembly</module>
     <module>external/flume</module>
     <module>external/flume-sink</module>
@@ -2469,6 +2468,13 @@
+    <profile>
+      <id>docker-integration-tests</id>
+      <modules>
+        <module>external/docker-integration-tests</module>
+      </modules>
+    </profile>
+
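For readers not familiar with the harness: each suite above plugs a `DatabaseOnDocker`
definition into the shared `DockerJDBCIntegrationSuite`, which pulls the image, starts
the container, waits for the database to come up, and hands the test a JDBC URL. The
sketch below shows that pattern end to end. It is a minimal illustration, not code from
this patch: only `imageName` and `env` appear verbatim in the diffs above, while
`jdbcPort`, `getJdbcUrl`, `dataPreparation`, `jdbcUrl`, and the suite name
`ExampleJDBCIntegrationSuite` are assumptions about the harness as it stood at the time.

```scala
package org.apache.spark.sql.jdbc

import java.sql.Connection
import java.util.Properties

import org.apache.spark.tags.DockerTest

// Illustrative sketch mirroring the suites touched in this patch. Member
// names such as jdbcPort, getJdbcUrl, dataPreparation, and jdbcUrl are
// assumptions about the harness, not a verbatim copy of its API.
@DockerTest
class ExampleJDBCIntegrationSuite extends DockerJDBCIntegrationSuite {
  override val db = new DatabaseOnDocker {
    // Image the harness pulls and starts before the tests run.
    override val imageName = "postgres:9.4.5"
    // Environment variables passed to the container at startup.
    override val env = Map("POSTGRES_PASSWORD" -> "rootpass")
    // Port the database listens on inside the container (assumed member).
    override val jdbcPort = 5432
    // JDBC URL built from the host ip/port that Docker mapped (assumed member).
    override def getJdbcUrl(ip: String, port: Int): String =
      s"jdbc:postgresql://$ip:$port/postgres?user=postgres&password=rootpass"
  }

  // Seed the database once the container accepts connections (assumed hook).
  override def dataPreparation(conn: Connection): Unit = {
    conn.prepareStatement("CREATE TABLE tbl (x INTEGER)").executeUpdate()
    conn.prepareStatement("INSERT INTO tbl VALUES (42)").executeUpdate()
  }

  test("basic read through the JDBC data source") {
    val df = sqlContext.read.jdbc(jdbcUrl, "tbl", new Properties)
    assert(df.collect().map(_.getInt(0)).toSeq === Seq(42))
  }
}
```

Because the module now sits behind the `docker-integration-tests` profile, a plain
`mvn package` on a machine without Docker never compiles or runs these suites; only
the profile and sbt invocations documented above do.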