path: root/docker-integration-tests/src/test/scala/org
author     Sean Owen <sowen@cloudera.com>  2016-03-09 18:27:44 +0000
committer  Sean Owen <sowen@cloudera.com>  2016-03-09 18:27:44 +0000
commit     256704c771d301700af9ebf0d180c1ba7c4116c0 (patch)
tree       f9be79919b5c6ec4847c24a086fa844555e2cd12 /docker-integration-tests/src/test/scala/org
parent     7791d0c3a9bdfe73e071266846f9ab1491fce50c (diff)
[SPARK-13595][BUILD] Move docker, extras modules into external
## What changes were proposed in this pull request?

Move `docker` dirs out of top level into `external/`; move `extras/*` into `external/`.

## How was this patch tested?

This is tested with Jenkins tests.

Author: Sean Owen <sowen@cloudera.com>

Closes #11523 from srowen/SPARK-13595.
Diffstat (limited to 'docker-integration-tests/src/test/scala/org')
-rw-r--r--  docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DockerJDBCIntegrationSuite.scala | 160
-rw-r--r--  docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala      | 153
-rw-r--r--  docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala     |  78
-rw-r--r--  docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala   | 105
-rw-r--r--  docker-integration-tests/src/test/scala/org/apache/spark/util/DockerUtils.scala                    |  68
5 files changed, 0 insertions, 564 deletions (the diffstat is limited to the old path, so only the removals appear here; the files are re-added under external/docker-integration-tests by this commit)
diff --git a/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DockerJDBCIntegrationSuite.scala b/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DockerJDBCIntegrationSuite.scala
deleted file mode 100644
index f73231fc80..0000000000
--- a/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DockerJDBCIntegrationSuite.scala
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.jdbc
-
-import java.net.ServerSocket
-import java.sql.Connection
-
-import scala.collection.JavaConverters._
-import scala.util.control.NonFatal
-
-import com.spotify.docker.client._
-import com.spotify.docker.client.messages.{ContainerConfig, HostConfig, PortBinding}
-import org.scalatest.BeforeAndAfterAll
-import org.scalatest.concurrent.Eventually
-import org.scalatest.time.SpanSugar._
-
-import org.apache.spark.SparkFunSuite
-import org.apache.spark.sql.test.SharedSQLContext
-import org.apache.spark.util.DockerUtils
-
-abstract class DatabaseOnDocker {
- /**
- * The docker image to be pulled.
- */
- val imageName: String
-
- /**
- * Environment variables to set inside of the Docker container while launching it.
- */
- val env: Map[String, String]
-
- /**
- * The container-internal JDBC port that the database listens on.
- */
- val jdbcPort: Int
-
- /**
- * Return a JDBC URL that connects to the database running at the given IP address and port.
- */
- def getJdbcUrl(ip: String, port: Int): String
-}
-
-abstract class DockerJDBCIntegrationSuite
- extends SparkFunSuite
- with BeforeAndAfterAll
- with Eventually
- with SharedSQLContext {
-
- val db: DatabaseOnDocker
-
- private var docker: DockerClient = _
- private var containerId: String = _
- protected var jdbcUrl: String = _
-
- override def beforeAll() {
- super.beforeAll()
- try {
- docker = DefaultDockerClient.fromEnv.build()
- // Check that Docker is actually up
- try {
- docker.ping()
- } catch {
- case NonFatal(e) =>
- log.error("Exception while connecting to Docker. Check whether Docker is running.")
- throw e
- }
- // Ensure that the Docker image is installed:
- try {
- docker.inspectImage(db.imageName)
- } catch {
- case e: ImageNotFoundException =>
- log.warn(s"Docker image ${db.imageName} not found; pulling image from registry")
- docker.pull(db.imageName)
- }
- // Configure networking (necessary for boot2docker / Docker Machine)
- val externalPort: Int = {
- val sock = new ServerSocket(0)
- val port = sock.getLocalPort
- sock.close()
- port
- }
- val dockerIp = DockerUtils.getDockerIp()
- val hostConfig: HostConfig = HostConfig.builder()
- .networkMode("bridge")
- .portBindings(
- Map(s"${db.jdbcPort}/tcp" -> List(PortBinding.of(dockerIp, externalPort)).asJava).asJava)
- .build()
- // Create the database container:
- val config = ContainerConfig.builder()
- .image(db.imageName)
- .networkDisabled(false)
- .env(db.env.map { case (k, v) => s"$k=$v" }.toSeq.asJava)
- .hostConfig(hostConfig)
- .exposedPorts(s"${db.jdbcPort}/tcp")
- .build()
- containerId = docker.createContainer(config).id
- // Start the container and wait until the database can accept JDBC connections:
- docker.startContainer(containerId)
- jdbcUrl = db.getJdbcUrl(dockerIp, externalPort)
- eventually(timeout(60.seconds), interval(1.seconds)) {
- val conn = java.sql.DriverManager.getConnection(jdbcUrl)
- conn.close()
- }
- // Run any setup queries:
- val conn: Connection = java.sql.DriverManager.getConnection(jdbcUrl)
- try {
- dataPreparation(conn)
- } finally {
- conn.close()
- }
- } catch {
- case NonFatal(e) =>
- try {
- afterAll()
- } finally {
- throw e
- }
- }
- }
-
- override def afterAll() {
- try {
- if (docker != null) {
- try {
- if (containerId != null) {
- docker.killContainer(containerId)
- docker.removeContainer(containerId)
- }
- } catch {
- case NonFatal(e) =>
- logWarning(s"Could not stop container $containerId", e)
- } finally {
- docker.close()
- }
- }
- } finally {
- super.afterAll()
- }
- }
-
- /**
- * Prepare databases and tables for testing.
- */
- def dataPreparation(connection: Connection): Unit
-}
diff --git a/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala b/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
deleted file mode 100644
index c68e4dc493..0000000000
--- a/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.jdbc
-
-import java.math.BigDecimal
-import java.sql.{Connection, Date, Timestamp}
-import java.util.Properties
-
-import org.apache.spark.tags.DockerTest
-
-@DockerTest
-class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
- override val db = new DatabaseOnDocker {
- override val imageName = "mysql:5.7.9"
- override val env = Map(
- "MYSQL_ROOT_PASSWORD" -> "rootpass"
- )
- override val jdbcPort: Int = 3306
- override def getJdbcUrl(ip: String, port: Int): String =
- s"jdbc:mysql://$ip:$port/mysql?user=root&password=rootpass"
- }
-
- override def dataPreparation(conn: Connection): Unit = {
- conn.prepareStatement("CREATE DATABASE foo").executeUpdate()
- conn.prepareStatement("CREATE TABLE tbl (x INTEGER, y TEXT(8))").executeUpdate()
- conn.prepareStatement("INSERT INTO tbl VALUES (42,'fred')").executeUpdate()
- conn.prepareStatement("INSERT INTO tbl VALUES (17,'dave')").executeUpdate()
-
- conn.prepareStatement("CREATE TABLE numbers (onebit BIT(1), tenbits BIT(10), "
- + "small SMALLINT, med MEDIUMINT, nor INT, big BIGINT, deci DECIMAL(40,20), flt FLOAT, "
- + "dbl DOUBLE)").executeUpdate()
- conn.prepareStatement("INSERT INTO numbers VALUES (b'0', b'1000100101', "
- + "17, 77777, 123456789, 123456789012345, 123456789012345.123456789012345, "
- + "42.75, 1.0000000000000002)").executeUpdate()
-
- conn.prepareStatement("CREATE TABLE dates (d DATE, t TIME, dt DATETIME, ts TIMESTAMP, "
- + "yr YEAR)").executeUpdate()
- conn.prepareStatement("INSERT INTO dates VALUES ('1991-11-09', '13:31:24', "
- + "'1996-01-01 01:23:45', '2009-02-13 23:31:30', '2001')").executeUpdate()
-
- // TODO: Test locale conversion for strings.
- conn.prepareStatement("CREATE TABLE strings (a CHAR(10), b VARCHAR(10), c TINYTEXT, "
- + "d TEXT, e MEDIUMTEXT, f LONGTEXT, g BINARY(4), h VARBINARY(10), i BLOB)"
- ).executeUpdate()
- conn.prepareStatement("INSERT INTO strings VALUES ('the', 'quick', 'brown', 'fox', " +
- "'jumps', 'over', 'the', 'lazy', 'dog')").executeUpdate()
- }
-
- test("Basic test") {
- val df = sqlContext.read.jdbc(jdbcUrl, "tbl", new Properties)
- val rows = df.collect()
- assert(rows.length == 2)
- val types = rows(0).toSeq.map(x => x.getClass.toString)
- assert(types.length == 2)
- assert(types(0).equals("class java.lang.Integer"))
- assert(types(1).equals("class java.lang.String"))
- }
-
- test("Numeric types") {
- val df = sqlContext.read.jdbc(jdbcUrl, "numbers", new Properties)
- val rows = df.collect()
- assert(rows.length == 1)
- val types = rows(0).toSeq.map(x => x.getClass.toString)
- assert(types.length == 9)
- assert(types(0).equals("class java.lang.Boolean"))
- assert(types(1).equals("class java.lang.Long"))
- assert(types(2).equals("class java.lang.Integer"))
- assert(types(3).equals("class java.lang.Integer"))
- assert(types(4).equals("class java.lang.Integer"))
- assert(types(5).equals("class java.lang.Long"))
- assert(types(6).equals("class java.math.BigDecimal"))
- assert(types(7).equals("class java.lang.Double"))
- assert(types(8).equals("class java.lang.Double"))
- assert(rows(0).getBoolean(0) == false)
- assert(rows(0).getLong(1) == 0x225)
- assert(rows(0).getInt(2) == 17)
- assert(rows(0).getInt(3) == 77777)
- assert(rows(0).getInt(4) == 123456789)
- assert(rows(0).getLong(5) == 123456789012345L)
- val bd = new BigDecimal("123456789012345.12345678901234500000")
- assert(rows(0).getAs[BigDecimal](6).equals(bd))
- assert(rows(0).getDouble(7) == 42.75)
- assert(rows(0).getDouble(8) == 1.0000000000000002)
- }
-
- test("Date types") {
- val df = sqlContext.read.jdbc(jdbcUrl, "dates", new Properties)
- val rows = df.collect()
- assert(rows.length == 1)
- val types = rows(0).toSeq.map(x => x.getClass.toString)
- assert(types.length == 5)
- assert(types(0).equals("class java.sql.Date"))
- assert(types(1).equals("class java.sql.Timestamp"))
- assert(types(2).equals("class java.sql.Timestamp"))
- assert(types(3).equals("class java.sql.Timestamp"))
- assert(types(4).equals("class java.sql.Date"))
- assert(rows(0).getAs[Date](0).equals(Date.valueOf("1991-11-09")))
- assert(rows(0).getAs[Timestamp](1).equals(Timestamp.valueOf("1970-01-01 13:31:24")))
- assert(rows(0).getAs[Timestamp](2).equals(Timestamp.valueOf("1996-01-01 01:23:45")))
- assert(rows(0).getAs[Timestamp](3).equals(Timestamp.valueOf("2009-02-13 23:31:30")))
- assert(rows(0).getAs[Date](4).equals(Date.valueOf("2001-01-01")))
- }
-
- test("String types") {
- val df = sqlContext.read.jdbc(jdbcUrl, "strings", new Properties)
- val rows = df.collect()
- assert(rows.length == 1)
- val types = rows(0).toSeq.map(x => x.getClass.toString)
- assert(types.length == 9)
- assert(types(0).equals("class java.lang.String"))
- assert(types(1).equals("class java.lang.String"))
- assert(types(2).equals("class java.lang.String"))
- assert(types(3).equals("class java.lang.String"))
- assert(types(4).equals("class java.lang.String"))
- assert(types(5).equals("class java.lang.String"))
- assert(types(6).equals("class [B"))
- assert(types(7).equals("class [B"))
- assert(types(8).equals("class [B"))
- assert(rows(0).getString(0).equals("the"))
- assert(rows(0).getString(1).equals("quick"))
- assert(rows(0).getString(2).equals("brown"))
- assert(rows(0).getString(3).equals("fox"))
- assert(rows(0).getString(4).equals("jumps"))
- assert(rows(0).getString(5).equals("over"))
- assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](6), Array[Byte](116, 104, 101, 0)))
- assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](7), Array[Byte](108, 97, 122, 121)))
- assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](8), Array[Byte](100, 111, 103)))
- }
-
- test("Basic write test") {
- val df1 = sqlContext.read.jdbc(jdbcUrl, "numbers", new Properties)
- val df2 = sqlContext.read.jdbc(jdbcUrl, "dates", new Properties)
- val df3 = sqlContext.read.jdbc(jdbcUrl, "strings", new Properties)
- df1.write.jdbc(jdbcUrl, "numberscopy", new Properties)
- df2.write.jdbc(jdbcUrl, "datescopy", new Properties)
- df3.write.jdbc(jdbcUrl, "stringscopy", new Properties)
- }
-}
diff --git a/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala b/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
deleted file mode 100644
index 8a0f938f7e..0000000000
--- a/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.jdbc
-
-import java.sql.Connection
-import java.util.Properties
-
-import org.apache.spark.sql.test.SharedSQLContext
-import org.apache.spark.tags.DockerTest
-
-/**
- * This integration suite was created and tested against an Oracle database running in Docker.
- * The ojdbc6-11.2.0.2.0.jar JDBC driver is not available in the Maven repository, so it was
- * downloaded manually from the Oracle site and installed into the local Maven repository for
- * testing. For a SparkQA test run, the ojdbc jar would likewise have to be placed manually in
- * the local Maven repository (com/oracle/ojdbc6/11.2.0.2.0).
- *
- * Steps to test this suite:
- * 1. Pull the Oracle 11g image: docker pull wnameless/oracle-xe-11g
- * 2. Start Docker: sudo service docker start
- * 3. Download the Oracle 11g driver jar and put it in the local Maven repo:
- * (com/oracle/ojdbc6/11.2.0.2.0/ojdbc6-11.2.0.2.0.jar)
- * 4. Increase the timeout and interval parameters for the Oracle test from (60, 1) to higher
- * values in DockerJDBCIntegrationSuite.scala (locally tested successfully with (200, 200)).
- * 5. Run the Spark test: ./build/sbt "test-only org.apache.spark.sql.jdbc.OracleIntegrationSuite"
- *
- * All tests in this suite are ignored because of the dependency on the Oracle jar, which is not
- * available in the Maven repository.
- */
-@DockerTest
-class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLContext {
- import testImplicits._
-
- override val db = new DatabaseOnDocker {
- override val imageName = "wnameless/oracle-xe-11g:latest"
- override val env = Map(
- "ORACLE_ROOT_PASSWORD" -> "oracle"
- )
- override val jdbcPort: Int = 1521
- override def getJdbcUrl(ip: String, port: Int): String =
- s"jdbc:oracle:thin:system/oracle@//$ip:$port/xe"
- }
-
- override def dataPreparation(conn: Connection): Unit = {
- }
-
- ignore("SPARK-12941: String datatypes to be mapped to Varchar in Oracle") {
- // create a sample dataframe with string type
- val df1 = sparkContext.parallelize(Seq(("foo"))).toDF("x")
- // write the dataframe to the oracle table tbl2
- df1.write.jdbc(jdbcUrl, "tbl2", new Properties)
- // read the table back from Oracle
- val dfRead = sqlContext.read.jdbc(jdbcUrl, "tbl2", new Properties)
- // get the rows
- val rows = dfRead.collect()
- // verify the data type of the inserted value
- val types = rows(0).toSeq.map(x => x.getClass.toString)
- assert(types(0).equals("class java.lang.String"))
- // verify that the inserted value is correct
- assert(rows(0).getString(0).equals("foo"))
- }
-}
diff --git a/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala b/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
deleted file mode 100644
index d55cdcf28b..0000000000
--- a/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.jdbc
-
-import java.sql.Connection
-import java.util.Properties
-
-import org.apache.spark.sql.Column
-import org.apache.spark.sql.catalyst.expressions.Literal
-import org.apache.spark.sql.types.{ArrayType, DecimalType}
-import org.apache.spark.tags.DockerTest
-
-@DockerTest
-class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
- override val db = new DatabaseOnDocker {
- override val imageName = "postgres:9.4.5"
- override val env = Map(
- "POSTGRES_PASSWORD" -> "rootpass"
- )
- override val jdbcPort = 5432
- override def getJdbcUrl(ip: String, port: Int): String =
- s"jdbc:postgresql://$ip:$port/postgres?user=postgres&password=rootpass"
- }
-
- override def dataPreparation(conn: Connection): Unit = {
- conn.prepareStatement("CREATE DATABASE foo").executeUpdate()
- conn.setCatalog("foo")
- conn.prepareStatement("CREATE TYPE enum_type AS ENUM ('d1', 'd2')").executeUpdate()
- conn.prepareStatement("CREATE TABLE bar (c0 text, c1 integer, c2 double precision, c3 bigint, "
- + "c4 bit(1), c5 bit(10), c6 bytea, c7 boolean, c8 inet, c9 cidr, "
- + "c10 integer[], c11 text[], c12 real[], c13 numeric(2,2)[], c14 enum_type)").executeUpdate()
- conn.prepareStatement("INSERT INTO bar VALUES ('hello', 42, 1.25, 123456789012345, B'0', "
- + "B'1000100101', E'\\\\xDEADBEEF', true, '172.16.0.42', '192.168.0.0/16', "
- + """'{1, 2}', '{"a", null, "b"}', '{0.11, 0.22}', '{0.11, 0.22}', 'd1')""").executeUpdate()
- }
-
- test("Type mapping for various types") {
- val df = sqlContext.read.jdbc(jdbcUrl, "bar", new Properties)
- val rows = df.collect()
- assert(rows.length == 1)
- val types = rows(0).toSeq.map(x => x.getClass)
- assert(types.length == 15)
- assert(classOf[String].isAssignableFrom(types(0)))
- assert(classOf[java.lang.Integer].isAssignableFrom(types(1)))
- assert(classOf[java.lang.Double].isAssignableFrom(types(2)))
- assert(classOf[java.lang.Long].isAssignableFrom(types(3)))
- assert(classOf[java.lang.Boolean].isAssignableFrom(types(4)))
- assert(classOf[Array[Byte]].isAssignableFrom(types(5)))
- assert(classOf[Array[Byte]].isAssignableFrom(types(6)))
- assert(classOf[java.lang.Boolean].isAssignableFrom(types(7)))
- assert(classOf[String].isAssignableFrom(types(8)))
- assert(classOf[String].isAssignableFrom(types(9)))
- assert(classOf[Seq[Int]].isAssignableFrom(types(10)))
- assert(classOf[Seq[String]].isAssignableFrom(types(11)))
- assert(classOf[Seq[Double]].isAssignableFrom(types(12)))
- assert(classOf[Seq[BigDecimal]].isAssignableFrom(types(13)))
- assert(classOf[String].isAssignableFrom(types(14)))
- assert(rows(0).getString(0).equals("hello"))
- assert(rows(0).getInt(1) == 42)
- assert(rows(0).getDouble(2) == 1.25)
- assert(rows(0).getLong(3) == 123456789012345L)
- assert(!rows(0).getBoolean(4))
- // BIT(10)'s come back as ASCII strings of ten ASCII 0's and 1's...
- assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](5),
- Array[Byte](49, 48, 48, 48, 49, 48, 48, 49, 48, 49)))
- assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](6),
- Array[Byte](0xDE.toByte, 0xAD.toByte, 0xBE.toByte, 0xEF.toByte)))
- assert(rows(0).getBoolean(7))
- assert(rows(0).getString(8) == "172.16.0.42")
- assert(rows(0).getString(9) == "192.168.0.0/16")
- assert(rows(0).getSeq(10) == Seq(1, 2))
- assert(rows(0).getSeq(11) == Seq("a", null, "b"))
- assert(rows(0).getSeq(12).toSeq == Seq(0.11f, 0.22f))
- assert(rows(0).getSeq(13) == Seq("0.11", "0.22").map(BigDecimal(_).bigDecimal))
- assert(rows(0).getString(14) == "d1")
- }
-
- test("Basic write test") {
- val df = sqlContext.read.jdbc(jdbcUrl, "bar", new Properties)
- // Test only that it doesn't crash.
- df.write.jdbc(jdbcUrl, "public.barcopy", new Properties)
- // Test that written numeric type has same DataType as input
- assert(sqlContext.read.jdbc(jdbcUrl, "public.barcopy", new Properties).schema(13).dataType ==
- ArrayType(DecimalType(2, 2), true))
- // Test write null values.
- df.select(df.queryExecution.analyzed.output.map { a =>
- Column(Literal.create(null, a.dataType)).as(a.name)
- }: _*).write.jdbc(jdbcUrl, "public.barcopy2", new Properties)
- }
-}
diff --git a/docker-integration-tests/src/test/scala/org/apache/spark/util/DockerUtils.scala b/docker-integration-tests/src/test/scala/org/apache/spark/util/DockerUtils.scala
deleted file mode 100644
index fda377e032..0000000000
--- a/docker-integration-tests/src/test/scala/org/apache/spark/util/DockerUtils.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.util
-
-import java.net.{Inet4Address, InetAddress, NetworkInterface}
-
-import scala.collection.JavaConverters._
-import scala.sys.process._
-import scala.util.Try
-
-private[spark] object DockerUtils {
-
- def getDockerIp(): String = {
- /** If docker-machine is set up on this box, attempt to find the IP from it. */
- def findFromDockerMachine(): Option[String] = {
- sys.env.get("DOCKER_MACHINE_NAME").flatMap { name =>
- Try(Seq("/bin/bash", "-c", s"docker-machine ip $name 2>/dev/null").!!.trim).toOption
- }
- }
- sys.env.get("DOCKER_IP")
- .orElse(findFromDockerMachine())
- .orElse(Try(Seq("/bin/bash", "-c", "boot2docker ip 2>/dev/null").!!.trim).toOption)
- .getOrElse {
- // This block of code is based on Utils.findLocalInetAddress(), but is modified to blacklist
- // certain interfaces.
- val address = InetAddress.getLocalHost
- // Address resolves to something like 127.0.1.1, which happens on Debian; try to find
- // a better address using the local network interfaces
- // getNetworkInterfaces returns interfaces in reverse order compared to ifconfig output order
- // on unix-like systems. On Windows, it returns them in index order.
- // It is preferable to pick an IP address following the system output order.
- val blackListedIFs = Seq(
- "vboxnet0", // Mac
- "docker0" // Linux
- )
- val activeNetworkIFs = NetworkInterface.getNetworkInterfaces.asScala.toSeq.filter { i =>
- !blackListedIFs.contains(i.getName)
- }
- val reOrderedNetworkIFs = activeNetworkIFs.reverse
- for (ni <- reOrderedNetworkIFs) {
- val addresses = ni.getInetAddresses.asScala
- .filterNot(addr => addr.isLinkLocalAddress || addr.isLoopbackAddress).toSeq
- if (addresses.nonEmpty) {
- val addr = addresses.find(_.isInstanceOf[Inet4Address]).getOrElse(addresses.head)
- // use getByAddress because Inet6Address.toHostName may append the interface name if it knows it
- val strippedAddress = InetAddress.getByAddress(addr.getAddress)
- return strippedAddress.getHostAddress
- }
- }
- address.getHostAddress
- }
- }
-}