aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorWeiqing Yang <yangweiqing001@gmail.com>2016-10-08 12:12:35 +0100
committerSean Owen <sowen@cloudera.com>2016-10-08 12:12:35 +0100
commit8a6bbe095b6a9aa33989c0deaa5ed0128d70320f (patch)
tree8e3f1bf702fe83ecf344854016692ef1e15aeb4f
parent4201ddcc07ca2e9af78bf4a74fdb3900c1783347 (diff)
downloadspark-8a6bbe095b6a9aa33989c0deaa5ed0128d70320f.tar.gz
spark-8a6bbe095b6a9aa33989c0deaa5ed0128d70320f.tar.bz2
spark-8a6bbe095b6a9aa33989c0deaa5ed0128d70320f.zip
[MINOR][SQL] Use resource path for test_script.sh
## What changes were proposed in this pull request?

This PR modified the test case `test("script")` to use resource path for `test_script.sh`. Make the test case portable (even in IntelliJ).

## How was this patch tested?

Passed the test case.

Before:
Run `test("script")` in IntelliJ:
```
Caused by: org.apache.spark.SparkException: Subprocess exited with status 127. Error: bash: src/test/resources/test_script.sh: No such file or directory
```
After:
Test passed.

Author: Weiqing Yang <yangweiqing001@gmail.com>

Closes #15246 from weiqingy/hivetest.
-rw-r--r--core/src/test/scala/org/apache/spark/SparkFunSuite.scala11
-rw-r--r--core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala6
-rw-r--r--core/src/test/scala/org/apache/spark/ui/UISuite.scala3
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala2
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala3
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala15
6 files changed, 28 insertions, 12 deletions
diff --git a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
index cd876807f8..18077c08c9 100644
--- a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
@@ -18,6 +18,8 @@
package org.apache.spark
// scalastyle:off
+import java.io.File
+
import org.scalatest.{BeforeAndAfterAll, FunSuite, Outcome}
import org.apache.spark.internal.Logging
@@ -41,6 +43,15 @@ abstract class SparkFunSuite
}
}
+ // helper function
+ protected final def getTestResourceFile(file: String): File = {
+ new File(getClass.getClassLoader.getResource(file).getFile)
+ }
+
+ protected final def getTestResourcePath(file: String): String = {
+ getTestResourceFile(file).getCanonicalPath
+ }
+
/**
* Log the suite name and the test name before and after each test.
*
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
index 5b316b2f6b..a595bc174a 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
@@ -59,8 +59,8 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers
with JsonTestUtils with Eventually with WebBrowser with LocalSparkContext
with ResetSystemProperties {
- private val logDir = new File("src/test/resources/spark-events")
- private val expRoot = new File("src/test/resources/HistoryServerExpectations/")
+ private val logDir = getTestResourcePath("spark-events")
+ private val expRoot = getTestResourceFile("HistoryServerExpectations")
private var provider: FsHistoryProvider = null
private var server: HistoryServer = null
@@ -68,7 +68,7 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers
def init(): Unit = {
val conf = new SparkConf()
- .set("spark.history.fs.logDirectory", logDir.getAbsolutePath)
+ .set("spark.history.fs.logDirectory", logDir)
.set("spark.history.fs.update.interval", "0")
.set("spark.testing", "true")
provider = new FsHistoryProvider(conf)
diff --git a/core/src/test/scala/org/apache/spark/ui/UISuite.scala b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
index dbb8dca4c8..4abcfb7e51 100644
--- a/core/src/test/scala/org/apache/spark/ui/UISuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
@@ -53,9 +53,10 @@ class UISuite extends SparkFunSuite {
}
private def sslEnabledConf(): (SparkConf, SSLOptions) = {
+ val keyStoreFilePath = getTestResourcePath("spark.keystore")
val conf = new SparkConf()
.set("spark.ssl.ui.enabled", "true")
- .set("spark.ssl.ui.keyStore", "./src/test/resources/spark.keystore")
+ .set("spark.ssl.ui.keyStore", keyStoreFilePath)
.set("spark.ssl.ui.keyStorePassword", "123456")
.set("spark.ssl.ui.keyPassword", "123456")
(conf, new SecurityManager(conf).getSSLOptions("ui"))
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala
index 9ac1e86fc8..c7f10e569f 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala
@@ -45,7 +45,7 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
// Used for generating new query answer files by saving
private val regenerateGoldenFiles: Boolean = System.getenv("SPARK_GENERATE_GOLDEN_FILES") == "1"
- private val goldenSQLPath = "src/test/resources/sqlgen/"
+ private val goldenSQLPath = getTestResourcePath("sqlgen")
protected override def beforeAll(): Unit = {
super.beforeAll()
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index 29317e2887..d3873cf6c8 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -152,7 +152,8 @@ class HiveSparkSubmitSuite
case v if v.startsWith("2.10") || v.startsWith("2.11") => v.substring(0, 4)
case x => throw new Exception(s"Unsupported Scala Version: $x")
}
- val testJar = s"sql/hive/src/test/resources/regression-test-SPARK-8489/test-$version.jar"
+ val jarDir = getTestResourcePath("regression-test-SPARK-8489")
+ val testJar = s"$jarDir/test-$version.jar"
val args = Seq(
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index 6c77a0deb5..6f2a16662b 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -66,13 +66,14 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
import spark.implicits._
test("script") {
+ val scriptFilePath = getTestResourcePath("test_script.sh")
if (testCommandAvailable("bash") && testCommandAvailable("echo | sed")) {
val df = Seq(("x1", "y1", "z1"), ("x2", "y2", "z2")).toDF("c1", "c2", "c3")
df.createOrReplaceTempView("script_table")
val query1 = sql(
- """
+ s"""
|SELECT col1 FROM (from(SELECT c1, c2, c3 FROM script_table) tempt_table
- |REDUCE c1, c2, c3 USING 'bash src/test/resources/test_script.sh' AS
+ |REDUCE c1, c2, c3 USING 'bash $scriptFilePath' AS
|(col1 STRING, col2 STRING)) script_test_table""".stripMargin)
checkAnswer(query1, Row("x1_y1") :: Row("x2_y2") :: Nil)
}
@@ -1290,11 +1291,12 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
.selectExpr("id AS a", "id AS b")
.createOrReplaceTempView("test")
+ val scriptFilePath = getTestResourcePath("data")
checkAnswer(
sql(
- """FROM(
+ s"""FROM(
| FROM test SELECT TRANSFORM(a, b)
- | USING 'python src/test/resources/data/scripts/test_transform.py "\t"'
+ | USING 'python $scriptFilePath/scripts/test_transform.py "\t"'
| AS (c STRING, d STRING)
|) t
|SELECT c
@@ -1308,12 +1310,13 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
.selectExpr("id AS a", "id AS b")
.createOrReplaceTempView("test")
+ val scriptFilePath = getTestResourcePath("data")
val df = sql(
- """FROM test
+ s"""FROM test
|SELECT TRANSFORM(a, b)
|ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
|WITH SERDEPROPERTIES('field.delim' = '|')
- |USING 'python src/test/resources/data/scripts/test_transform.py "|"'
+ |USING 'python $scriptFilePath/scripts/test_transform.py "|"'
|AS (c STRING, d STRING)
|ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
|WITH SERDEPROPERTIES('field.delim' = '|')