Diffstat (limited to 'sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala')
-rw-r--r-- | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala | 8
1 file changed, 4 insertions, 4 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
index 07bc8ae148..4e7214ce83 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
@@ -131,7 +131,7 @@ case class DataSource(
     val allPaths = caseInsensitiveOptions.get("path")
     val globbedPaths = allPaths.toSeq.flatMap { path =>
       val hdfsPath = new Path(path)
-      val fs = hdfsPath.getFileSystem(sqlContext.sparkContext.hadoopConfiguration)
+      val fs = hdfsPath.getFileSystem(sqlContext.sessionState.hadoopConf)
       val qualified = hdfsPath.makeQualified(fs.getUri, fs.getWorkingDirectory)
       SparkHadoopUtil.get.globPathIfNecessary(qualified)
     }.toArray
@@ -225,7 +225,7 @@ case class DataSource(
       case Seq(singlePath) =>
         try {
           val hdfsPath = new Path(singlePath)
-          val fs = hdfsPath.getFileSystem(sqlContext.sparkContext.hadoopConfiguration)
+          val fs = hdfsPath.getFileSystem(sqlContext.sessionState.hadoopConf)
           val metadataPath = new Path(hdfsPath, FileStreamSink.metadataDir)
           val res = fs.exists(metadataPath)
           res
@@ -284,7 +284,7 @@ case class DataSource(
     val allPaths = caseInsensitiveOptions.get("path") ++ paths
     val globbedPaths = allPaths.flatMap { path =>
       val hdfsPath = new Path(path)
-      val fs = hdfsPath.getFileSystem(sqlContext.sparkContext.hadoopConfiguration)
+      val fs = hdfsPath.getFileSystem(sqlContext.sessionState.hadoopConf)
       val qualified = hdfsPath.makeQualified(fs.getUri, fs.getWorkingDirectory)
       val globPath = SparkHadoopUtil.get.globPathIfNecessary(qualified)
@@ -374,7 +374,7 @@ case class DataSource(
     val path = new Path(caseInsensitiveOptions.getOrElse("path", {
       throw new IllegalArgumentException("'path' is not specified")
     }))
-    val fs = path.getFileSystem(sqlContext.sparkContext.hadoopConfiguration)
+    val fs = path.getFileSystem(sqlContext.sessionState.hadoopConf)
    path.makeQualified(fs.getUri, fs.getWorkingDirectory)
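
Every hunk applies the same substitution: the FileSystem is now resolved from the session-scoped sqlContext.sessionState.hadoopConf instead of the shared sqlContext.sparkContext.hadoopConfiguration, so per-session Hadoop settings take effect during path resolution. A minimal standalone sketch of the resolve-and-qualify pattern these hunks share, assuming a plain Hadoop Configuration stands in for the session configuration (the object name and main method are illustrative only, not part of the patch):

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path

object QualifyPathExample {
  // Resolve the scheme-appropriate FileSystem from the given configuration
  // and return the path fully qualified against it, as DataSource does.
  def qualify(rawPath: String, hadoopConf: Configuration): Path = {
    val hdfsPath = new Path(rawPath)
    val fs = hdfsPath.getFileSystem(hadoopConf)
    // Fill in the default URI and working directory for relative paths.
    hdfsPath.makeQualified(fs.getUri, fs.getWorkingDirectory)
  }

  def main(args: Array[String]): Unit = {
    // A fresh Configuration stands in for sqlContext.sessionState.hadoopConf,
    // which in Spark SQL carries the session's Hadoop overrides.
    val conf = new Configuration()
    println(qualify("data/input", conf))
  }
}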