author    shitis <ssaxena.ece@gmail.com>    2014-10-14 14:16:45 -0700
committer Andrew Or <andrewor14@gmail.com>  2014-10-14 14:16:45 -0700
commit    24b818b971ba715b6796518e4c6afdecb1b16f15 (patch)
tree      d7f281e88e09b4a2d70dc683742e51ada86c0db1 /core/src/main/scala
parent    7ced88b0d6b4d90c262f19afa99c02b51c0ea5ea (diff)
[SPARK-3944][Core] Using Option[String] where value of String can be null
Author: shitis <ssaxena.ece@gmail.com>

Closes #2795 from Shiti/master and squashes the following commits:

46897d7 [shitis] Using Option wrapper to convert String with value null to None
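The idiom the patch applies can be shown in isolation. A minimal sketch, not part of the commit and with an example URI string of my own: java.net.URI.getScheme returns null for a scheme-less URI, and wrapping it with Option(...) turns that null into None, so the match no longer needs a bare null case.

  import java.net.URI

  // A relative path has no scheme, so getScheme returns null.
  val uri = new URI("core/src/main/scala/Utils.scala")

  // Option(x) is Some(x) when x is non-null and None when x is null.
  Option(uri.getScheme) match {
    case Some(scheme) => println(s"scheme: $scheme")
    case None         => println("no scheme")   // this branch runs here
  }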
Diffstat (limited to 'core/src/main/scala')
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Utils.scala | 26
1 file changed, 14 insertions(+), 12 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 07477dd460..aad901620f 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -340,8 +340,8 @@ private[spark] object Utils extends Logging {
val targetFile = new File(targetDir, filename)
val uri = new URI(url)
val fileOverwrite = conf.getBoolean("spark.files.overwrite", defaultValue = false)
- uri.getScheme match {
- case "http" | "https" | "ftp" =>
+ Option(uri.getScheme) match {
+ case Some("http") | Some("https") | Some("ftp") =>
logInfo("Fetching " + url + " to " + tempFile)
var uc: URLConnection = null
@@ -374,7 +374,7 @@ private[spark] object Utils extends Logging {
}
}
Files.move(tempFile, targetFile)
- case "file" | null =>
+ case Some("file") | None =>
// In the case of a local file, copy the local file to the target directory.
// Note the difference between uri vs url.
val sourceFile = if (uri.isAbsolute) new File(uri) else new File(url)
@@ -403,7 +403,7 @@ private[spark] object Utils extends Logging {
logInfo("Copying " + sourceFile.getAbsolutePath + " to " + targetFile.getAbsolutePath)
Files.copy(sourceFile, targetFile)
}
- case _ =>
+ case Some(other) =>
// Use the Hadoop filesystem library, which supports file://, hdfs://, s3://, and others
val fs = getHadoopFileSystem(uri, hadoopConf)
val in = fs.open(new Path(uri))
@@ -1368,16 +1368,17 @@ private[spark] object Utils extends Logging {
if (uri.getPath == null) {
throw new IllegalArgumentException(s"Given path is malformed: $uri")
}
- uri.getScheme match {
- case windowsDrive(d) if windows =>
+
+ Option(uri.getScheme) match {
+ case Some(windowsDrive(d)) if windows =>
new URI("file:/" + uri.toString.stripPrefix("/"))
- case null =>
+ case None =>
// Preserve fragments for HDFS file name substitution (denoted by "#")
// For instance, in "abc.py#xyz.py", "xyz.py" is the name observed by the application
val fragment = uri.getFragment
val part = new File(uri.getPath).toURI
new URI(part.getScheme, part.getPath, fragment)
- case _ =>
+ case Some(other) =>
uri
}
}
@@ -1399,10 +1400,11 @@ private[spark] object Utils extends Logging {
} else {
paths.split(",").filter { p =>
val formattedPath = if (windows) formatWindowsPath(p) else p
- new URI(formattedPath).getScheme match {
- case windowsDrive(d) if windows => false
- case "local" | "file" | null => false
- case _ => true
+ val uri = new URI(formattedPath)
+ Option(uri.getScheme) match {
+ case Some(windowsDrive(d)) if windows => false
+ case Some("local") | Some("file") | None => false
+ case Some(other) => true
}
}
}
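The last hunk's pattern can also be sketched as a standalone helper. This is illustrative only: the name and signature are simplified and the Windows path handling from the real method is dropped. It keeps only the paths whose scheme is neither absent nor local.

  import java.net.URI

  // Illustrative sketch: filter a comma-separated path list down to the
  // entries that are not local (no scheme, "local", or "file" count as local).
  def nonLocalPaths(paths: String): Array[String] =
    paths.split(",").filter { p =>
      Option(new URI(p).getScheme) match {
        case Some("local") | Some("file") | None => false
        case Some(_)                             => true
      }
    }

  // nonLocalPaths("hdfs://nn/a.py,/tmp/b.py,file:/tmp/c.py")
  //   returns Array("hdfs://nn/a.py")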