diff options
author | Sean Owen <sowen@cloudera.com> | 2014-05-04 17:43:28 -0700 |
---|---|---|
committer | Patrick Wendell <pwendell@gmail.com> | 2014-05-04 17:43:35 -0700 |
commit | f5041579ff573f988b673c2506fa4edc32f5ad84 (patch) | |
tree | 47ea7373e71db3e24dc1682402ba7161a541b5b7 /core | |
parent | 0c98a8f6a761b941a9e0cf6fde6df8d0b8d24057 (diff) | |
download | spark-f5041579ff573f988b673c2506fa4edc32f5ad84.tar.gz spark-f5041579ff573f988b673c2506fa4edc32f5ad84.tar.bz2 spark-f5041579ff573f988b673c2506fa4edc32f5ad84.zip |
SPARK-1629. Addendum: Depend on commons lang3 (already used by tachyon) as it's used in ReplSuite, and return to use lang3 utility in Utils.scala
For consideration. This was proposed in related discussion: https://github.com/apache/spark/pull/569
Author: Sean Owen <sowen@cloudera.com>
Closes #635 from srowen/SPARK-1629.2 and squashes the following commits:
a442b98 [Sean Owen] Depend on commons lang3 (already used by tachyon) as it's used in ReplSuite, and return to use lang3 utility in Utils.scala
Diffstat (limited to 'core')
-rw-r--r--  core/pom.xml                                          | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Utils.scala | 5
2 files changed, 6 insertions, 3 deletions
diff --git a/core/pom.xml b/core/pom.xml
index 822b5b1dd7..36c71e67b5 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -70,6 +70,10 @@
       <artifactId>guava</artifactId>
     </dependency>
     <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+    </dependency>
+    <dependency>
       <groupId>com.google.code.findbugs</groupId>
       <artifactId>jsr305</artifactId>
     </dependency>
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index acd7eef6d2..bef4dab3d7 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -32,6 +32,7 @@ import scala.util.Try
 import com.google.common.io.Files
 import com.google.common.util.concurrent.ThreadFactoryBuilder
+import org.apache.commons.lang3.SystemUtils
 import org.apache.hadoop.fs.{FileSystem, FileUtil, Path}
 import org.json4s._
 import tachyon.client.{TachyonFile,TachyonFS}
@@ -1073,9 +1074,7 @@ private[spark] object Utils extends Logging {
   /**
    * Return true if this is Windows.
    */
-  def isWindows = {
-    Option(System.getProperty("os.name")).exists(_.startsWith("Windows"))
-  }
+  def isWindows = SystemUtils.IS_OS_WINDOWS

   /**
    * Indicates whether Spark is currently running unit tests.