Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningAwareFileCatalog.scala  10
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileCatalogSuite.scala              45
2 files changed, 53 insertions(+), 2 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningAwareFileCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningAwareFileCatalog.scala
index d2d5b56c82..702ba97222 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningAwareFileCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningAwareFileCatalog.scala
@@ -76,7 +76,15 @@ abstract class PartitioningAwareFileCatalog(
paths.flatMap { path =>
// Make the path qualified (consistent with listLeafFiles and listLeafFilesInParallel).
val fs = path.getFileSystem(hadoopConf)
- val qualifiedPath = fs.makeQualified(path)
+ val qualifiedPathPre = fs.makeQualified(path)
+ val qualifiedPath: Path = if (qualifiedPathPre.isRoot && !qualifiedPathPre.isAbsolute) {
+ // SPARK-17613: Always append `Path.SEPARATOR` to the end of parent directories,
+ // because the `leafFile.getParent` would have returned an absolute path with the
+ // separator at the end.
+ new Path(qualifiedPathPre, Path.SEPARATOR)
+ } else {
+ qualifiedPathPre
+ }
// There are three cases possible with each path
// 1. The path is a directory and has children files in it. Then it must be present in
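The change above hinges on a quirk of Hadoop's Path: for a leaf file directly under a bucket-style root, getParent returns the root with a trailing separator, while a user-supplied base path usually lacks it, so the two never compare equal even though they name the same directory. The following standalone sketch (not part of the patch) illustrates that behaviour; the object name PathSuffixSketch is illustrative only, and the mockFs://some-bucket URIs mirror the values used in the test below.

import org.apache.hadoop.fs.Path

object PathSuffixSketch {
  def main(args: Array[String]): Unit = {
    val leafFile  = new Path("mockFs://some-bucket/file1.json")
    val parentDir = leafFile.getParent                 // mockFs://some-bucket/  (separator at the end)
    val basePath  = new Path("mockFs://some-bucket")   // user-supplied base, no separator

    // Same directory, but the two Paths do not compare equal, so a map keyed by
    // `leafFile.getParent` misses a lookup with the unsuffixed base path.
    println(parentDir == basePath)                     // false

    // Appending Path.SEPARATOR to a root, non-absolute path (as the patch does)
    // yields a Path that matches the getParent form.
    println(new Path(basePath, Path.SEPARATOR) == parentDir)  // true
  }
}

The `qualifiedPathPre.isRoot && !qualifiedPathPre.isAbsolute` guard restricts the suffixing to exactly this case, so root paths that already carry the separator and ordinary sub-directories pass through unchanged.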
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileCatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileCatalogSuite.scala
index 5c8d3226e9..fa3abd0098 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileCatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileCatalogSuite.scala
@@ -18,10 +18,12 @@
package org.apache.spark.sql.execution.datasources

import java.io.File
+import java.net.URI

+import scala.collection.mutable
import scala.language.reflectiveCalls

-import org.apache.hadoop.fs.Path
+import org.apache.hadoop.fs.{FileStatus, Path, RawLocalFileSystem}

import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.test.SharedSQLContext
@@ -78,4 +80,45 @@ class FileCatalogSuite extends SharedSQLContext {
assert(catalog1.listLeafFiles(catalog1.paths).isEmpty)
}
}
+
+ test("SPARK-17613 - PartitioningAwareFileCatalog: base path w/o '/' at end") {
+ class MockCatalog(
+ override val paths: Seq[Path]) extends PartitioningAwareFileCatalog(spark, Map.empty, None) {
+
+ override def refresh(): Unit = {}
+
+ override def leafFiles: mutable.LinkedHashMap[Path, FileStatus] = mutable.LinkedHashMap(
+ new Path("mockFs://some-bucket/file1.json") -> new FileStatus()
+ )
+
+ override def leafDirToChildrenFiles: Map[Path, Array[FileStatus]] = Map(
+ new Path("mockFs://some-bucket/") -> Array(new FileStatus())
+ )
+
+ override def partitionSpec(): PartitionSpec = {
+ PartitionSpec.emptySpec
+ }
+ }
+
+ withSQLConf(
+ "fs.mockFs.impl" -> classOf[FakeParentPathFileSystem].getName,
+ "fs.mockFs.impl.disable.cache" -> "true") {
+ val pathWithSlash = new Path("mockFs://some-bucket/")
+ assert(pathWithSlash.getParent === null)
+ val pathWithoutSlash = new Path("mockFs://some-bucket")
+ assert(pathWithoutSlash.getParent === null)
+ val catalog1 = new MockCatalog(Seq(pathWithSlash))
+ val catalog2 = new MockCatalog(Seq(pathWithoutSlash))
+ assert(catalog1.allFiles().nonEmpty)
+ assert(catalog2.allFiles().nonEmpty)
+ }
+ }
+}
+
+class FakeParentPathFileSystem extends RawLocalFileSystem {
+ override def getScheme: String = "mockFs"
+
+ override def getUri: URI = {
+ URI.create("mockFs://some-bucket")
+ }
}
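
For reference, the fs.mockFs.impl and fs.mockFs.impl.disable.cache settings passed to withSQLConf are standard Hadoop configuration keys: fs.<scheme>.impl names the FileSystem class to use for a scheme, and disabling the cache keeps the fake instance from being reused across tests. The test relies on Spark copying SQL conf entries into the Hadoop configuration it hands to the catalog, which is how these settings reach path.getFileSystem. Below is a minimal sketch of the same resolution outside the test harness; it assumes FakeParentPathFileSystem above is on the classpath, and the object name MockFsResolutionSketch is illustrative only.

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path

import org.apache.spark.sql.execution.datasources.FakeParentPathFileSystem

object MockFsResolutionSketch {
  def main(args: Array[String]): Unit = {
    val conf = new Configuration()
    // Same keys the test sets through withSQLConf: Hadoop resolves a scheme to a
    // FileSystem class via fs.<scheme>.impl; the disable.cache key keeps the fake
    // instance from being cached and reused elsewhere.
    conf.set("fs.mockFs.impl", classOf[FakeParentPathFileSystem].getName)
    conf.set("fs.mockFs.impl.disable.cache", "true")

    val file = new Path("mockFs://some-bucket/file1.json")
    val fs = file.getFileSystem(conf)                 // a FakeParentPathFileSystem
    println(fs.getUri)                                // mockFs://some-bucket
    // The qualified form of the base path still lacks the trailing separator,
    // which is exactly the case the PartitioningAwareFileCatalog change handles.
    println(fs.makeQualified(new Path("mockFs://some-bucket")))  // mockFs://some-bucket
  }
}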