path: root/core/src
author	hyukjinkwon <gurwls223@gmail.com>	2016-09-07 19:24:03 +0900
committer	Kousuke Saruta <sarutak@oss.nttdata.co.jp>	2016-09-07 19:24:03 +0900
commit	6b41195bca65de6236168d96758f93b85f1dd7ca (patch)
tree	039cd03fcbf3901c2ca839030d6695c9fd8e8bca /core/src
parent	3ce3a282c8463408f9a2db93c1748e8df8087e07 (diff)
[SPARK-17339][SPARKR][CORE] Fix some R tests and use Path.toUri in SparkContext for Windows paths in SparkR
## What changes were proposed in this pull request?

This PR fixes the Windows path issues in several APIs. Please refer to https://issues.apache.org/jira/browse/SPARK-17339 for more details.

## How was this patch tested?

Tests via AppVeyor CI - https://ci.appveyor.com/project/HyukjinKwon/spark/build/82-SPARK-17339-fix-r

Also, manually:

![2016-09-06 3 14 38](https://cloud.githubusercontent.com/assets/6477701/18263406/b93a98be-7444-11e6-9521-b28ee65a4771.png)

Author: hyukjinkwon <gurwls223@gmail.com>

Closes #14960 from HyukjinKwon/SPARK-17339.
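As a minimal illustration (not part of this commit), the sketch below shows why `java.net.URI` misparses Windows paths while `org.apache.hadoop.fs.Path.toUri` handles them. The object name and sample path are made up, and the `Path.toUri` result described is for a Windows JVM, where Hadoop's `Path` detects the drive letter:

```scala
import java.net.{URI, URISyntaxException}
import org.apache.hadoop.fs.Path

object WindowsPathSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical Windows-style input path, e.g. as passed from SparkR.
    val winPath = "C:/Users/spark/data.txt"

    // new URI(path) parses the drive letter "C:" as a URI scheme, so a
    // later FileSystem.get would fail with something like
    // "No FileSystem for scheme: C".
    println(new URI(winPath).getScheme) // prints: C

    // With backslashes, the URI constructor fails outright.
    try new URI("C:\\Users\\spark\\data.txt")
    catch {
      case e: URISyntaxException => println(s"URI rejected: ${e.getMessage}")
    }

    // On Windows, Hadoop's Path prepends a slash before the drive letter,
    // so toUri yields /C:/Users/spark/data.txt with no bogus scheme and
    // FileSystem resolution falls back to the default filesystem.
    println(new Path(winPath).toUri)
  }
}
```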
Diffstat (limited to 'core/src')
-rw-r--r--	core/src/main/scala/org/apache/spark/SparkContext.scala | 4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 744d5d0f7a..4aa795a58a 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -992,7 +992,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationClient
     // This is a hack to enforce loading hdfs-site.xml.
     // See SPARK-11227 for details.
-    FileSystem.get(new URI(path), hadoopConfiguration)
+    FileSystem.get(new Path(path).toUri, hadoopConfiguration)
     // A Hadoop configuration can be about 10 KB, which is pretty big, so broadcast it.
     val confBroadcast = broadcast(new SerializableConfiguration(hadoopConfiguration))
@@ -1081,7 +1081,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationClient
     // This is a hack to enforce loading hdfs-site.xml.
     // See SPARK-11227 for details.
-    FileSystem.get(new URI(path), hadoopConfiguration)
+    FileSystem.get(new Path(path).toUri, hadoopConfiguration)
     // The call to NewHadoopJob automatically adds security credentials to conf,
     // so we don't need to explicitly add them ourselves
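For context on the hunks above, here is a hedged sketch of the idea behind the SPARK-11227 hack and this fix. The object and helper names are hypothetical; only the `FileSystem.get(...)` line is verbatim from the patch:

```scala
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}

object HdfsSiteLoadingSketch {
  // Hypothetical helper mirroring the patched call sites in
  // SparkContext.hadoopFile and SparkContext.newAPIHadoopFile.
  def forceHdfsSiteLoading(path: String, hadoopConfiguration: Configuration): Unit = {
    // Getting a FileSystem forces Hadoop's HDFS client classes to load,
    // which registers hdfs-site.xml as a default Configuration resource;
    // see SPARK-11227. Doing this before the configuration is broadcast
    // to executors is the point of the hack.
    //
    // new Path(path).toUri, unlike new URI(path), does not misparse a
    // Windows drive letter such as "C:" as a URI scheme, so this call
    // no longer fails on Windows paths (SPARK-17339).
    FileSystem.get(new Path(path).toUri, hadoopConfiguration)
  }
}
```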