-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala  4
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala  6
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala  2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala  20
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala  9
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogBackwardCompatibilitySuite.scala  45
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetadataCacheSuite.scala  2
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetastoreCatalogSuite.scala  4
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala  24
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/MultiDatabaseSuite.scala  14
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala  2
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/PartitionProviderCompatibilitySuite.scala  12
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala  8
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala  25
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala  10
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala  6
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/PruneFileSourcePartitionsSuite.scala  2
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala  8
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala  6
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala  2
20 files changed, 106 insertions, 105 deletions
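
Every hunk below follows the same pattern: test code that previously embedded a raw filesystem path (File.getAbsolutePath / getCanonicalPath) in SQL statements or catalog metadata now embeds a proper file: URI (File.toURI, or new Path(...).toUri for Hadoop paths). A minimal sketch of the difference, using a hypothetical directory name containing a space (not a path from this commit):

    import java.io.File

    val dir = new File("/tmp/spark test")   // hypothetical example path
    println(dir.getAbsolutePath)            // /tmp/spark test        (raw path, space unescaped)
    println(dir.toURI)                      // file:/tmp/spark%20test (escaped file: URI)

The URI form carries an explicit scheme and percent-encodes special characters, so it stays unambiguous when spliced into OPTIONS (path '...'), LOCATION '...', or locationUri/path storage properties.
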
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
index 00e663c324..742f900840 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
@@ -751,7 +751,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
val partWithExistingDir = CatalogTablePartition(
Map("partCol1" -> "7", "partCol2" -> "8"),
CatalogStorageFormat(
- Some(tempPath.getAbsolutePath),
+ Some(tempPath.toURI.toString),
None, None, None, false, Map.empty))
catalog.createPartitions("db1", "tbl", Seq(partWithExistingDir), ignoreIfExists = false)
@@ -760,7 +760,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
val partWithNonExistingDir = CatalogTablePartition(
Map("partCol1" -> "9", "partCol2" -> "10"),
CatalogStorageFormat(
- Some(tempPath.getAbsolutePath),
+ Some(tempPath.toURI.toString),
None, None, None, false, Map.empty))
catalog.createPartitions("db1", "tbl", Seq(partWithNonExistingDir), ignoreIfExists = false)
assert(tempPath.exists())
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
index b0339a88fb..ee280a313c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
@@ -542,7 +542,7 @@ class ColumnExpressionSuite extends QueryTest with SharedSQLContext {
val q = spark.read.parquet(dir.getCanonicalPath).select(
input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()"))
val firstRow = q.head()
- assert(firstRow.getString(0).contains(dir.getCanonicalPath))
+ assert(firstRow.getString(0).contains(dir.toURI.getPath))
assert(firstRow.getLong(1) == 0)
assert(firstRow.getLong(2) > 0)
@@ -566,7 +566,7 @@ class ColumnExpressionSuite extends QueryTest with SharedSQLContext {
val q = df.select(
input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()"))
val firstRow = q.head()
- assert(firstRow.getString(0).contains(dir.getCanonicalPath))
+ assert(firstRow.getString(0).contains(dir.toURI.getPath))
assert(firstRow.getLong(1) == 0)
assert(firstRow.getLong(2) > 0)
@@ -595,7 +595,7 @@ class ColumnExpressionSuite extends QueryTest with SharedSQLContext {
val q = df.select(
input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()"))
val firstRow = q.head()
- assert(firstRow.getString(0).contains(dir.getCanonicalPath))
+ assert(firstRow.getString(0).contains(dir.toURI.getPath))
assert(firstRow.getLong(1) == 0)
assert(firstRow.getLong(2) > 0)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala
index 391bcb8b35..5c63c6a414 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala
@@ -93,7 +93,7 @@ class GlobalTempViewSuite extends QueryTest with SharedSQLContext {
withTempPath { path =>
try {
Seq(1 -> "a").toDF("i", "j").write.parquet(path.getAbsolutePath)
- sql(s"CREATE GLOBAL TEMP VIEW src USING parquet OPTIONS (PATH '${path.getAbsolutePath}')")
+ sql(s"CREATE GLOBAL TEMP VIEW src USING parquet OPTIONS (PATH '${path.toURI}')")
checkAnswer(spark.table(s"$globalTempDB.src"), Row(1, "a"))
sql(s"INSERT INTO $globalTempDB.src SELECT 2, 'b'")
checkAnswer(spark.table(s"$globalTempDB.src"), Row(1, "a") :: Row(2, "b") :: Nil)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
index 61939fe5ef..99da1969fc 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
@@ -70,7 +70,7 @@ class CreateTableAsSelectSuite
|CREATE TABLE jsonTable
|USING json
|OPTIONS (
- | path '${path.toString}'
+ | path '${path.toURI}'
|) AS
|SELECT a, b FROM jt
""".stripMargin)
@@ -94,7 +94,7 @@ class CreateTableAsSelectSuite
|CREATE TABLE jsonTable
|USING json
|OPTIONS (
- | path '${childPath.toString}'
+ | path '${childPath.toURI}'
|) AS
|SELECT a, b FROM jt
""".stripMargin)
@@ -112,7 +112,7 @@ class CreateTableAsSelectSuite
|CREATE TABLE jsonTable
|USING json
|OPTIONS (
- | path '${path.toString}'
+ | path '${path.toURI}'
|) AS
|SELECT a, b FROM jt
""".stripMargin)
@@ -127,7 +127,7 @@ class CreateTableAsSelectSuite
|CREATE TABLE IF NOT EXISTS jsonTable
|USING json
|OPTIONS (
- | path '${path.toString}'
+ | path '${path.toURI}'
|) AS
|SELECT a * 4 FROM jt
""".stripMargin)
@@ -145,7 +145,7 @@ class CreateTableAsSelectSuite
|CREATE TABLE jsonTable
|USING json
|OPTIONS (
- | path '${path.toString}'
+ | path '${path.toURI}'
|) AS
|SELECT b FROM jt
""".stripMargin)
@@ -162,7 +162,7 @@ class CreateTableAsSelectSuite
sql(
s"""
|CREATE TEMPORARY TABLE t USING PARQUET
- |OPTIONS (PATH '${path.toString}')
+ |OPTIONS (PATH '${path.toURI}')
|PARTITIONED BY (a)
|AS SELECT 1 AS a, 2 AS b
""".stripMargin
@@ -179,7 +179,7 @@ class CreateTableAsSelectSuite
sql(
s"""
|CREATE EXTERNAL TABLE t USING PARQUET
- |OPTIONS (PATH '${path.toString}')
+ |OPTIONS (PATH '${path.toURI}')
|AS SELECT 1 AS a, 2 AS b
""".stripMargin
)
@@ -196,7 +196,7 @@ class CreateTableAsSelectSuite
sql(
s"""
|CREATE TABLE t USING PARQUET
- |OPTIONS (PATH '${path.toString}')
+ |OPTIONS (PATH '${path.toURI}')
|PARTITIONED BY (a)
|AS SELECT 1 AS a, 2 AS b
""".stripMargin
@@ -212,7 +212,7 @@ class CreateTableAsSelectSuite
sql(
s"""
|CREATE TABLE t USING PARQUET
- |OPTIONS (PATH '${path.toString}')
+ |OPTIONS (PATH '${path.toURI}')
|CLUSTERED BY (a) SORTED BY (b) INTO 5 BUCKETS
|AS SELECT 1 AS a, 2 AS b
""".stripMargin
@@ -228,7 +228,7 @@ class CreateTableAsSelectSuite
sql(
s"""
|CREATE TABLE t USING PARQUET
- |OPTIONS (PATH '${path.toString}')
+ |OPTIONS (PATH '${path.toURI}')
|CLUSTERED BY (a) SORTED BY (b) INTO 0 BUCKETS
|AS SELECT 1 AS a, 2 AS b
""".stripMargin
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala
index 097dd6e367..30a957ef81 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala
@@ -22,6 +22,7 @@ import java.util.concurrent.TimeUnit
import scala.concurrent.duration._
+import org.apache.hadoop.fs.Path
import org.mockito.Mockito._
import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
import org.scalatest.PrivateMethodTester.PrivateMethod
@@ -355,7 +356,7 @@ class DataStreamReaderWriterSuite extends StreamTest with BeforeAndAfter with Pr
test("source metadataPath") {
LastOptions.clear()
- val checkpointLocation = newMetadataDir
+ val checkpointLocationURI = new Path(newMetadataDir).toUri
val df1 = spark.readStream
.format("org.apache.spark.sql.streaming.test")
@@ -367,21 +368,21 @@ class DataStreamReaderWriterSuite extends StreamTest with BeforeAndAfter with Pr
val q = df1.union(df2).writeStream
.format("org.apache.spark.sql.streaming.test")
- .option("checkpointLocation", checkpointLocation)
+ .option("checkpointLocation", checkpointLocationURI.toString)
.trigger(ProcessingTime(10.seconds))
.start()
q.stop()
verify(LastOptions.mockStreamSourceProvider).createSource(
spark.sqlContext,
- checkpointLocation + "/sources/0",
+ s"$checkpointLocationURI/sources/0",
None,
"org.apache.spark.sql.streaming.test",
Map.empty)
verify(LastOptions.mockStreamSourceProvider).createSource(
spark.sqlContext,
- checkpointLocation + "/sources/1",
+ s"$checkpointLocationURI/sources/1",
None,
"org.apache.spark.sql.streaming.test",
Map.empty)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogBackwardCompatibilitySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogBackwardCompatibilitySuite.scala
index c5753cec80..00fdfbcebb 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogBackwardCompatibilitySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogBackwardCompatibilitySuite.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.hive
import java.net.URI
+import org.apache.hadoop.fs.Path
import org.scalatest.BeforeAndAfterEach
import org.apache.spark.sql.QueryTest
@@ -39,6 +40,7 @@ class HiveExternalCatalogBackwardCompatibilitySuite extends QueryTest
spark.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog].client
val tempDir = Utils.createTempDir().getCanonicalFile
+ val tempDirUri = tempDir.toURI.toString.stripSuffix("/")
override def beforeEach(): Unit = {
sql("CREATE DATABASE test_db")
@@ -56,11 +58,12 @@ class HiveExternalCatalogBackwardCompatibilitySuite extends QueryTest
spark.sharedState.externalCatalog.getTable("test_db", tableName)
}
- private def defaultTablePath(tableName: String): String = {
- spark.sessionState.catalog.defaultTablePath(TableIdentifier(tableName, Some("test_db")))
+ private def defaultTableURI(tableName: String): URI = {
+ val defaultPath =
+ spark.sessionState.catalog.defaultTablePath(TableIdentifier(tableName, Some("test_db")))
+ new Path(defaultPath).toUri
}
-
// Raw table metadata that are dumped from tables created by Spark 2.0. Note that, all spark
// versions prior to 2.1 would generate almost same raw table metadata for a specific table.
val simpleSchema = new StructType().add("i", "int")
@@ -78,7 +81,7 @@ class HiveExternalCatalogBackwardCompatibilitySuite extends QueryTest
identifier = TableIdentifier("tbl2", Some("test_db")),
tableType = CatalogTableType.EXTERNAL,
storage = CatalogStorageFormat.empty.copy(
- locationUri = Some(tempDir.getAbsolutePath),
+ locationUri = Some(tempDirUri),
inputFormat = Some("org.apache.hadoop.mapred.TextInputFormat"),
outputFormat = Some("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat")),
schema = simpleSchema)
@@ -128,7 +131,8 @@ class HiveExternalCatalogBackwardCompatibilitySuite extends QueryTest
lazy val dataSourceTable = CatalogTable(
identifier = TableIdentifier("tbl4", Some("test_db")),
tableType = CatalogTableType.MANAGED,
- storage = CatalogStorageFormat.empty.copy(properties = Map("path" -> defaultTablePath("tbl4"))),
+ storage = CatalogStorageFormat.empty.copy(
+ properties = Map("path" -> defaultTableURI("tbl4").toString)),
schema = new StructType(),
properties = Map(
"spark.sql.sources.provider" -> "json",
@@ -138,7 +142,8 @@ class HiveExternalCatalogBackwardCompatibilitySuite extends QueryTest
lazy val hiveCompatibleDataSourceTable = CatalogTable(
identifier = TableIdentifier("tbl5", Some("test_db")),
tableType = CatalogTableType.MANAGED,
- storage = CatalogStorageFormat.empty.copy(properties = Map("path" -> defaultTablePath("tbl5"))),
+ storage = CatalogStorageFormat.empty.copy(
+ properties = Map("path" -> defaultTableURI("tbl5").toString)),
schema = simpleSchema,
properties = Map(
"spark.sql.sources.provider" -> "parquet",
@@ -148,7 +153,8 @@ class HiveExternalCatalogBackwardCompatibilitySuite extends QueryTest
lazy val partitionedDataSourceTable = CatalogTable(
identifier = TableIdentifier("tbl6", Some("test_db")),
tableType = CatalogTableType.MANAGED,
- storage = CatalogStorageFormat.empty.copy(properties = Map("path" -> defaultTablePath("tbl6"))),
+ storage = CatalogStorageFormat.empty.copy(
+ properties = Map("path" -> defaultTableURI("tbl6").toString)),
schema = new StructType(),
properties = Map(
"spark.sql.sources.provider" -> "json",
@@ -161,8 +167,8 @@ class HiveExternalCatalogBackwardCompatibilitySuite extends QueryTest
identifier = TableIdentifier("tbl7", Some("test_db")),
tableType = CatalogTableType.EXTERNAL,
storage = CatalogStorageFormat.empty.copy(
- locationUri = Some(defaultTablePath("tbl7") + "-__PLACEHOLDER__"),
- properties = Map("path" -> tempDir.getAbsolutePath)),
+ locationUri = Some(defaultTableURI("tbl7").toString + "-__PLACEHOLDER__"),
+ properties = Map("path" -> tempDirUri)),
schema = new StructType(),
properties = Map(
"spark.sql.sources.provider" -> "json",
@@ -173,8 +179,8 @@ class HiveExternalCatalogBackwardCompatibilitySuite extends QueryTest
identifier = TableIdentifier("tbl8", Some("test_db")),
tableType = CatalogTableType.EXTERNAL,
storage = CatalogStorageFormat.empty.copy(
- locationUri = Some(tempDir.getAbsolutePath),
- properties = Map("path" -> tempDir.getAbsolutePath)),
+ locationUri = Some(tempDirUri),
+ properties = Map("path" -> tempDirUri)),
schema = simpleSchema,
properties = Map(
"spark.sql.sources.provider" -> "parquet",
@@ -185,8 +191,8 @@ class HiveExternalCatalogBackwardCompatibilitySuite extends QueryTest
identifier = TableIdentifier("tbl9", Some("test_db")),
tableType = CatalogTableType.EXTERNAL,
storage = CatalogStorageFormat.empty.copy(
- locationUri = Some(defaultTablePath("tbl9") + "-__PLACEHOLDER__"),
- properties = Map("path" -> tempDir.getAbsolutePath)),
+ locationUri = Some(defaultTableURI("tbl9").toString + "-__PLACEHOLDER__"),
+ properties = Map("path" -> tempDirUri)),
schema = new StructType(),
properties = Map("spark.sql.sources.provider" -> "json"))
@@ -210,7 +216,8 @@ class HiveExternalCatalogBackwardCompatibilitySuite extends QueryTest
if (tbl.tableType == CatalogTableType.EXTERNAL) {
// trim the URI prefix
val tableLocation = new URI(readBack.storage.locationUri.get).getPath
- assert(tableLocation == tempDir.getAbsolutePath)
+ val expectedLocation = tempDir.toURI.getPath.stripSuffix("/")
+ assert(tableLocation == expectedLocation)
}
}
}
@@ -218,13 +225,15 @@ class HiveExternalCatalogBackwardCompatibilitySuite extends QueryTest
test("make sure we can alter table location created by old version of Spark") {
withTempDir { dir =>
for ((tbl, _) <- rawTablesAndExpectations if tbl.tableType == CatalogTableType.EXTERNAL) {
- sql(s"ALTER TABLE ${tbl.identifier} SET LOCATION '${dir.getAbsolutePath}'")
+ val path = dir.toURI.toString.stripSuffix("/")
+ sql(s"ALTER TABLE ${tbl.identifier} SET LOCATION '$path'")
val readBack = getTableMetadata(tbl.identifier.table)
// trim the URI prefix
val actualTableLocation = new URI(readBack.storage.locationUri.get).getPath
- assert(actualTableLocation == dir.getAbsolutePath)
+ val expected = dir.toURI.getPath.stripSuffix("/")
+ assert(actualTableLocation == expected)
}
}
}
@@ -240,10 +249,10 @@ class HiveExternalCatalogBackwardCompatibilitySuite extends QueryTest
// trim the URI prefix
val actualTableLocation = new URI(readBack.storage.locationUri.get).getPath
val expectedLocation = if (tbl.tableType == CatalogTableType.EXTERNAL) {
- tempDir.getAbsolutePath
+ tempDir.toURI.getPath.stripSuffix("/")
} else {
// trim the URI prefix
- new URI(defaultTablePath(newName)).getPath
+ defaultTableURI(newName).getPath
}
assert(actualTableLocation == expectedLocation)
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetadataCacheSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetadataCacheSuite.scala
index 6e887d95c0..0c28a1b609 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetadataCacheSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetadataCacheSuite.scala
@@ -75,7 +75,7 @@ class HiveMetadataCacheSuite extends QueryTest with SQLTestUtils with TestHiveSi
|create external table test (id long)
|partitioned by (f1 int, f2 int)
|stored as parquet
- |location "${dir.getAbsolutePath}"""".stripMargin)
+ |location "${dir.toURI}"""".stripMargin)
spark.sql("msck repair table test")
val df = spark.sql("select * from test")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetastoreCatalogSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetastoreCatalogSuite.scala
index 0a280b4952..16cf4d7ec6 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetastoreCatalogSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetastoreCatalogSuite.scala
@@ -156,11 +156,9 @@ class DataSourceWithHiveMetastoreCatalogSuite
test(s"Persist non-partitioned $provider relation into metastore as managed table using CTAS") {
withTempPath { dir =>
withTable("t") {
- val path = dir.getCanonicalPath
-
sql(
s"""CREATE TABLE t USING $provider
- |OPTIONS (path '$path')
+ |OPTIONS (path '${dir.toURI}')
|AS SELECT 1 AS d1, "val_1" AS d2
""".stripMargin)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index 0f787be0bb..2b8d4e2bb3 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -179,7 +179,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
s"""CREATE TABLE jsonTable
|USING org.apache.spark.sql.json
|OPTIONS (
- | path '${tempDir.getCanonicalPath}'
+ | path '${tempDir.toURI}'
|)
""".stripMargin)
@@ -215,7 +215,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
s"""CREATE TABLE jsonTable
|USING org.apache.spark.sql.json
|OPTIONS (
- | path '${tempDir.getCanonicalPath}'
+ | path '${tempDir.toURI}'
|)
""".stripMargin)
@@ -232,7 +232,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
s"""CREATE TABLE jsonTable
|USING org.apache.spark.sql.json
|OPTIONS (
- | path '${tempDir.getCanonicalPath}'
+ | path '${tempDir.toURI}'
|)
""".stripMargin)
@@ -291,7 +291,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
s"""CREATE TABLE ctasJsonTable
|USING org.apache.spark.sql.json.DefaultSource
|OPTIONS (
- | path '$tempPath'
+ | path '${tempPath.toURI}'
|) AS
|SELECT * FROM jsonTable
""".stripMargin)
@@ -307,7 +307,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
test("CTAS with IF NOT EXISTS") {
withTempPath { path =>
- val tempPath = path.getCanonicalPath
+ val tempPath = path.toURI
withTable("jsonTable", "ctasJsonTable") {
sql(
@@ -1049,11 +1049,9 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
test("CTAS: persisted partitioned data source table") {
withTempPath { dir =>
withTable("t") {
- val path = dir.getCanonicalPath
-
sql(
s"""CREATE TABLE t USING PARQUET
- |OPTIONS (PATH '$path')
+ |OPTIONS (PATH '${dir.toURI}')
|PARTITIONED BY (a)
|AS SELECT 1 AS a, 2 AS b
""".stripMargin
@@ -1119,11 +1117,9 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
test("CTAS: persisted partitioned bucketed data source table") {
withTempPath { dir =>
withTable("t") {
- val path = dir.getCanonicalPath
-
sql(
s"""CREATE TABLE t USING PARQUET
- |OPTIONS (PATH '$path')
+ |OPTIONS (PATH '${dir.toURI}')
|PARTITIONED BY (a)
|CLUSTERED BY (b) SORTED BY (c) INTO 2 BUCKETS
|AS SELECT 1 AS a, 2 AS b, 3 AS c
@@ -1289,11 +1285,9 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
test("SPARK-15025: create datasource table with path with select") {
withTempPath { dir =>
withTable("t") {
- val path = dir.getCanonicalPath
-
sql(
s"""CREATE TABLE t USING PARQUET
- |OPTIONS (PATH '$path')
+ |OPTIONS (PATH '${dir.toURI}')
|AS SELECT 1 AS a, 2 AS b, 3 AS c
""".stripMargin
)
@@ -1307,7 +1301,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
test("SPARK-15269 external data source table creation") {
withTempPath { dir =>
- val path = dir.getCanonicalPath
+ val path = dir.toURI.toString
spark.range(1).write.json(path)
withTable("t") {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MultiDatabaseSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MultiDatabaseSuite.scala
index 7322465109..47ee4dd4d9 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MultiDatabaseSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MultiDatabaseSuite.scala
@@ -80,7 +80,7 @@ class MultiDatabaseSuite extends QueryTest with SQLTestUtils with TestHiveSingle
|CREATE TABLE t1
|USING parquet
|OPTIONS (
- | path '$path'
+ | path '${dir.toURI}'
|)
""".stripMargin)
assert(getTableNames(Option(db)).contains("t1"))
@@ -105,7 +105,7 @@ class MultiDatabaseSuite extends QueryTest with SQLTestUtils with TestHiveSingle
|CREATE TABLE $db.t1
|USING parquet
|OPTIONS (
- | path '$path'
+ | path '${dir.toURI}'
|)
""".stripMargin)
assert(getTableNames(Option(db)).contains("t1"))
@@ -212,7 +212,7 @@ class MultiDatabaseSuite extends QueryTest with SQLTestUtils with TestHiveSingle
s"""CREATE EXTERNAL TABLE t (id BIGINT)
|PARTITIONED BY (p INT)
|STORED AS PARQUET
- |LOCATION '$path'
+ |LOCATION '${dir.toURI}'
""".stripMargin)
checkAnswer(spark.table("t"), spark.emptyDataFrame)
@@ -244,7 +244,7 @@ class MultiDatabaseSuite extends QueryTest with SQLTestUtils with TestHiveSingle
s"""CREATE EXTERNAL TABLE $db.t (id BIGINT)
|PARTITIONED BY (p INT)
|STORED AS PARQUET
- |LOCATION '$path'
+ |LOCATION '${dir.toURI}'
""".stripMargin)
checkAnswer(spark.table(s"$db.t"), spark.emptyDataFrame)
@@ -280,8 +280,6 @@ class MultiDatabaseSuite extends QueryTest with SQLTestUtils with TestHiveSingle
}
withTempDir { dir =>
- val path = dir.getCanonicalPath
-
{
val message = intercept[AnalysisException] {
sql(
@@ -289,7 +287,7 @@ class MultiDatabaseSuite extends QueryTest with SQLTestUtils with TestHiveSingle
|CREATE TABLE `d:b`.`t:a` (a int)
|USING parquet
|OPTIONS (
- | path '$path'
+ | path '${dir.toURI}'
|)
""".stripMargin)
}.getMessage
@@ -304,7 +302,7 @@ class MultiDatabaseSuite extends QueryTest with SQLTestUtils with TestHiveSingle
|CREATE TABLE `d:b`.`table` (a int)
|USING parquet
|OPTIONS (
- | path '$path'
+ | path '${dir.toURI}'
|)
""".stripMargin)
}.getMessage
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala
index 14266e6847..05b6059472 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala
@@ -45,7 +45,7 @@ class ParquetHiveCompatibilitySuite extends ParquetCompatibilityTest with TestHi
private def testParquetHiveCompatibility(row: Row, hiveTypes: String*): Unit = {
withTable("parquet_compat") {
withTempPath { dir =>
- val path = dir.getCanonicalPath
+ val path = dir.toURI.toString
// Hive columns are always nullable, so here we append a all-null row.
val rows = row :: Row(Seq.fill(row.length)(null): _*) :: Nil
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/PartitionProviderCompatibilitySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/PartitionProviderCompatibilitySuite.scala
index 3f84cbdb1b..f88fc4a2ce 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/PartitionProviderCompatibilitySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/PartitionProviderCompatibilitySuite.scala
@@ -38,7 +38,7 @@ class PartitionProviderCompatibilitySuite
spark.sql(s"""
|create table $tableName (fieldOne long, partCol int)
|using parquet
- |options (path "${dir.getAbsolutePath}")
+ |options (path "${dir.toURI}")
|partitioned by (partCol)""".stripMargin)
}
@@ -239,7 +239,7 @@ class PartitionProviderCompatibilitySuite
// custom locations sanity check
spark.sql(s"""
|alter table test partition (A=0, B='%')
- |set location '${dir.getAbsolutePath}'""".stripMargin)
+ |set location '${dir.toURI}'""".stripMargin)
assert(spark.sql("select * from test").count() == 28) // moved to empty dir
// rename partition sanity check
@@ -315,11 +315,11 @@ class PartitionProviderCompatibilitySuite
spark.sql(s"""
|create table test (id long, P1 int, P2 int)
|using parquet
- |options (path "${base.getAbsolutePath}")
+ |options (path "${base.toURI}")
|partitioned by (P1, P2)""".stripMargin)
- spark.sql(s"alter table test add partition (P1=0, P2=0) location '${a.getAbsolutePath}'")
- spark.sql(s"alter table test add partition (P1=0, P2=1) location '${b.getAbsolutePath}'")
- spark.sql(s"alter table test add partition (P1=1, P2=0) location '${c.getAbsolutePath}'")
+ spark.sql(s"alter table test add partition (P1=0, P2=0) location '${a.toURI}'")
+ spark.sql(s"alter table test add partition (P1=0, P2=1) location '${b.toURI}'")
+ spark.sql(s"alter table test add partition (P1=1, P2=0) location '${c.toURI}'")
spark.sql(s"alter table test add partition (P1=1, P2=1)")
testFn
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala
index feeaade561..b20c10c6a3 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala
@@ -40,7 +40,7 @@ class QueryPartitionSuite extends QueryTest with SQLTestUtils with TestHiveSingl
val tmpDir = Files.createTempDir()
// create the table for test
sql(s"CREATE TABLE table_with_partition(key int,value string) " +
- s"PARTITIONED by (ds string) location '${tmpDir.toURI.toString}' ")
+ s"PARTITIONED by (ds string) location '${tmpDir.toURI}' ")
sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='1') " +
"SELECT key,value FROM testData")
sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='2') " +
@@ -71,12 +71,12 @@ class QueryPartitionSuite extends QueryTest with SQLTestUtils with TestHiveSingl
test("SPARK-13709: reading partitioned Avro table with nested schema") {
withTempDir { dir =>
- val path = dir.getCanonicalPath
+ val path = dir.toURI.toString
val tableName = "spark_13709"
val tempTableName = "spark_13709_temp"
- new File(path, tableName).mkdir()
- new File(path, tempTableName).mkdir()
+ new File(dir.getAbsolutePath, tableName).mkdir()
+ new File(dir.getAbsolutePath, tempTableName).mkdir()
val avroSchema =
"""{
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index 1680f6c40a..8fda1c5875 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -26,7 +26,6 @@ import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
import org.apache.spark.sql.hive.test.TestHiveSingleton
-import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SQLTestUtils
import org.apache.spark.sql.types.StructType
@@ -202,11 +201,12 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
// LOAD DATA INTO non-partitioned table can't specify partition
intercept[AnalysisException] {
- sql(s"""$loadQuery INPATH "$testData" INTO TABLE non_part_table PARTITION(ds="1")""")
+ sql(
+ s"""$loadQuery INPATH "${testData.toURI}" INTO TABLE non_part_table PARTITION(ds="1")""")
}
withInputFile { path =>
- sql(s"""$loadQuery INPATH "$path" INTO TABLE non_part_table""")
+ sql(s"""$loadQuery INPATH "${path.toURI}" INTO TABLE non_part_table""")
// Non-local mode is expected to move the file, while local mode is expected to copy it.
// Check once here that the behavior is the expected.
@@ -222,7 +222,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
//
// TODO: need a similar test for non-local mode.
if (local) {
- val incorrectUri = "file:/" + testData.getAbsolutePath()
+ val incorrectUri = "file://path/to/data/files/employee.dat"
intercept[AnalysisException] {
sql(s"""LOAD DATA LOCAL INPATH "$incorrectUri" INTO TABLE non_part_table""")
}
@@ -231,7 +231,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
// Use URI as inpath:
// file:/path/to/data/files/employee.dat
withInputFile { path =>
- sql(s"""$loadQuery INPATH "${path.toURI()}" INTO TABLE non_part_table""")
+ sql(s"""$loadQuery INPATH "${path.toURI}" INTO TABLE non_part_table""")
}
checkAnswer(
@@ -240,7 +240,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
// Overwrite existing data.
withInputFile { path =>
- sql(s"""$loadQuery INPATH "${path.toURI()}" OVERWRITE INTO TABLE non_part_table""")
+ sql(s"""$loadQuery INPATH "${path.toURI}" OVERWRITE INTO TABLE non_part_table""")
}
checkAnswer(
@@ -257,7 +257,8 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
""".stripMargin)
// LOAD DATA INTO partitioned table must specify partition
- withInputFile { path =>
+ withInputFile { f =>
+ val path = f.toURI
intercept[AnalysisException] {
sql(s"""$loadQuery INPATH "$path" INTO TABLE part_table""")
}
@@ -273,16 +274,16 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
}
}
- withInputFile { path =>
- sql(s"""$loadQuery INPATH "$path" INTO TABLE part_table PARTITION(c="1", d="2")""")
+ withInputFile { f =>
+ sql(s"""$loadQuery INPATH "${f.toURI}" INTO TABLE part_table PARTITION(c="1", d="2")""")
}
checkAnswer(
sql("SELECT employeeID, employeeName FROM part_table WHERE c = '1' AND d = '2'"),
sql("SELECT * FROM non_part_table").collect())
// Different order of partition columns.
- withInputFile { path =>
- sql(s"""$loadQuery INPATH "$path" INTO TABLE part_table PARTITION(d="1", c="2")""")
+ withInputFile { f =>
+ sql(s"""$loadQuery INPATH "${f.toURI}" INTO TABLE part_table PARTITION(d="1", c="2")""")
}
checkAnswer(
sql("SELECT employeeID, employeeName FROM part_table WHERE c = '2' AND d = '1'"),
@@ -300,7 +301,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
|LINES TERMINATED BY '\n'
""".stripMargin)
- val testData = hiveContext.getHiveFile("data/files/employee.dat").getCanonicalPath
+ val testData = hiveContext.getHiveFile("data/files/employee.dat").toURI
sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE non_part_table""")
checkAnswer(
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index 6fbbed1d47..e5b23dafcf 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -858,8 +858,8 @@ class HiveQuerySuite extends HiveComparisonTest with SQLTestUtils with BeforeAnd
test("ADD JAR command 2") {
// this is a test case from mapjoin_addjar.q
- val testJar = TestHive.getHiveFile("hive-hcatalog-core-0.13.1.jar").getCanonicalPath
- val testData = TestHive.getHiveFile("data/files/sample.json").getCanonicalPath
+ val testJar = TestHive.getHiveFile("hive-hcatalog-core-0.13.1.jar").toURI
+ val testData = TestHive.getHiveFile("data/files/sample.json").toURI
sql(s"ADD JAR $testJar")
sql(
"""CREATE TABLE t1(a string, b string)
@@ -877,8 +877,8 @@ class HiveQuerySuite extends HiveComparisonTest with SQLTestUtils with BeforeAnd
}
test("CREATE TEMPORARY FUNCTION") {
- val funcJar = TestHive.getHiveFile("TestUDTF.jar").getCanonicalPath
- val jarURL = s"file://$funcJar"
+ val funcJar = TestHive.getHiveFile("TestUDTF.jar")
+ val jarURL = funcJar.toURI.toURL
sql(s"ADD JAR $jarURL")
sql(
"""CREATE TEMPORARY FUNCTION udtf_count2 AS
@@ -889,7 +889,7 @@ class HiveQuerySuite extends HiveComparisonTest with SQLTestUtils with BeforeAnd
}
test("ADD FILE command") {
- val testFile = TestHive.getHiveFile("data/files/v1.txt").getCanonicalFile
+ val testFile = TestHive.getHiveFile("data/files/v1.txt").toURI
sql(s"ADD FILE $testFile")
val checkAddFileRDD = sparkContext.parallelize(1 to 2, 1).mapPartitions { _ =>
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
index 4098bb597b..58909ab9ea 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
@@ -428,7 +428,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
\"separatorChar\" = \",\",
\"quoteChar\" = \"\\\"\",
\"escapeChar\" = \"\\\\\")
- LOCATION '$tempDir'
+ LOCATION '${tempDir.toURI}'
""")
val answer1 =
@@ -444,7 +444,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
sql(
s"""CREATE EXTERNAL TABLE external_t5 (c1 int, c2 int)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
- LOCATION '$tempDir'
+ LOCATION '${tempDir.toURI}'
""")
val answer2 =
@@ -460,7 +460,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
// External parquet pointing to LOCATION
- val parquetLocation = tempDir + "/external_parquet"
+ val parquetLocation = s"${tempDir.toURI}/external_parquet"
sql("SELECT 1, 2").write.parquet(parquetLocation)
sql(
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/PruneFileSourcePartitionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/PruneFileSourcePartitionsSuite.scala
index cdbc26cd5c..cd8f94b1cc 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/PruneFileSourcePartitionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/PruneFileSourcePartitionsSuite.scala
@@ -42,7 +42,7 @@ class PruneFileSourcePartitionsSuite extends QueryTest with SQLTestUtils with Te
|CREATE EXTERNAL TABLE test(i int)
|PARTITIONED BY (p int)
|STORED AS parquet
- |LOCATION '${dir.getAbsolutePath}'""".stripMargin)
+ |LOCATION '${dir.toURI}'""".stripMargin)
val tableMeta = spark.sharedState.externalCatalog.getTable("default", "test")
val catalogFileIndex = new CatalogFileIndex(spark, tableMeta, 0)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index e607af67f9..f65b5f4daa 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -346,7 +346,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
s"""
|CREATE TEMPORARY FUNCTION udtf_count_temp
|AS 'org.apache.spark.sql.hive.execution.GenericUDTFCount2'
- |USING JAR '${hiveContext.getHiveFile("TestUDTF.jar").getCanonicalPath()}'
+ |USING JAR '${hiveContext.getHiveFile("TestUDTF.jar").toURI}'
""".stripMargin)
checkKeywordsExist(sql("describe function udtf_count_temp"),
@@ -1290,7 +1290,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
test("specifying database name for a temporary table is not allowed") {
withTempPath { dir =>
- val path = dir.getCanonicalPath
+ val path = dir.toURI.toString
val df = sparkContext.parallelize(1 to 10).map(i => (i, i.toString)).toDF("num", "str")
df
.write
@@ -1298,7 +1298,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
.save(path)
// We don't support creating a temporary table while specifying a database
- val message = intercept[AnalysisException] {
+ intercept[AnalysisException] {
spark.sql(
s"""
|CREATE TEMPORARY TABLE db.t
@@ -1307,7 +1307,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
| path '$path'
|)
""".stripMargin)
- }.getMessage
+ }
// If you use backticks to quote the name then it's OK.
spark.sql(
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
index b8761e9de2..9fa1fb931d 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
@@ -350,7 +350,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
spark.sql(
s"""CREATE TABLE empty_orc(key INT, value STRING)
|STORED AS ORC
- |LOCATION '$path'
+ |LOCATION '${dir.toURI}'
""".stripMargin)
val emptyDF = Seq.empty[(Int, String)].toDF("key", "value").coalesce(1)
@@ -451,7 +451,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
s"""
|CREATE TABLE dummy_orc(key INT, value STRING)
|STORED AS ORC
- |LOCATION '$path'
+ |LOCATION '${dir.toURI}'
""".stripMargin)
spark.sql(
@@ -500,7 +500,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
|create external table dummy_orc (id long, valueField long)
|partitioned by (partitionValue int)
|stored as orc
- |location "${dir.getAbsolutePath}"""".stripMargin)
+ |location "${dir.toURI}"""".stripMargin)
spark.sql(s"msck repair table dummy_orc")
checkAnswer(spark.sql("select * from dummy_orc"), df)
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
index 06566a9550..2446bed58a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
@@ -847,7 +847,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
"fs.file.impl.disable.cache" -> "true"
)
withTempPath { dir =>
- val path = "file://" + dir.getCanonicalPath
+ val path = dir.toURI.toString
val df1 = spark.range(4)
df1.coalesce(1).write.mode("overwrite").options(options).format(dataSourceName).save(path)
df1.coalesce(1).write.mode("append").options(options).format(dataSourceName).save(path)