author    Sean Owen <sowen@cloudera.com>    2014-03-25 10:21:25 -0700
committer Reynold Xin <rxin@apache.org>     2014-03-25 10:21:25 -0700
commit    71d4ed271bcbddb154643bd44297ed77190e75cf (patch)
tree      a6618c610ce4d9001ca8e5b08d4811e3105ecfc3 /core
parent    134ace7fea7f772f5bafa9d11b8677cb7d311266 (diff)
SPARK-1316. Remove use of Commons IO
(This follows from a side point on SPARK-1133, in discussion of the PR: https://github.com/apache/spark/pull/164 )

Commons IO is barely used in the project, and can easily be replaced with equivalent calls to Guava or the existing Spark `Utils.scala` class. Removing a dependency feels good, and this one in particular can get a little problematic since Hadoop uses it too.

Author: Sean Owen <sowen@cloudera.com>

Closes #226 from srowen/SPARK-1316 and squashes the following commits:

21efef3 [Sean Owen] Remove use of Commons IO
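As an illustration of the replacement described above, the sketch below pairs two common Commons IO calls with their Guava equivalents; directory deletion goes through Spark's own `Utils.deleteRecursively`, as the hunks further down show. The object and method names here are hypothetical, chosen only to show the mapping, and are not code from this patch.

  import java.io.File

  import com.google.common.base.Charsets
  import com.google.common.io.Files

  // Hypothetical helper sketching Guava equivalents of common Commons IO calls.
  object CommonsIoReplacements {
    // was: FileUtils.readFileToString(f)
    def readFileToString(f: File): String = Files.toString(f, Charsets.UTF_8)

    // was: FileUtils.writeStringToFile(f, s)
    def writeStringToFile(f: File, s: String): Unit = Files.write(s, f, Charsets.UTF_8)

    // Directory deletion uses the project's own helper instead:
    //   org.apache.spark.util.Utils.deleteRecursively(dir)
  }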
Diffstat (limited to 'core')
-rw-r--r--  core/pom.xml                                                  5
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Utils.scala         5
-rw-r--r--  core/src/test/scala/org/apache/spark/util/UtilsSuite.scala    3
3 files changed, 5 insertions, 8 deletions
diff --git a/core/pom.xml b/core/pom.xml
index a6f478b09b..eb6cc4d310 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -201,11 +201,6 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>commons-io</groupId>
-      <artifactId>commons-io</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
       <groupId>org.scalatest</groupId>
       <artifactId>scalatest_${scala.binary.version}</artifactId>
       <scope>test</scope>
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 13d9dbdd9a..ad87fda140 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -529,7 +529,10 @@ private[spark] object Utils extends Logging {
       }
     }
     if (!file.delete()) {
-      throw new IOException("Failed to delete: " + file)
+      // Delete can also fail if the file simply did not exist
+      if (file.exists()) {
+        throw new IOException("Failed to delete: " + file.getAbsolutePath)
+      }
     }
   }
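A minimal standalone sketch of the behavior this hunk gives deleteRecursively: a failed delete() is treated as an error only if the file still exists afterwards, so a path that was already gone (for example, removed by another process) no longer throws. The recursion and directory-listing details below are assumptions for illustration, not the exact surrounding code in `Utils.scala`.

  import java.io.{File, IOException}

  object DeleteRecursivelySketch {
    def deleteRecursively(file: File): Unit = {
      if (file.isDirectory) {
        // listFiles() can return null on I/O error; treat that as "no children"
        Option(file.listFiles()).getOrElse(Array.empty[File]).foreach(deleteRecursively)
      }
      if (!file.delete()) {
        // Only fatal if the file is actually still there after the failed delete
        if (file.exists()) {
          throw new IOException("Failed to delete: " + file.getAbsolutePath)
        }
      }
    }
  }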
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index 8f55b2372c..eb8f591560 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -24,7 +24,6 @@ import java.nio.{ByteBuffer, ByteOrder}
 
 import com.google.common.base.Charsets
 import com.google.common.io.Files
-import org.apache.commons.io.FileUtils
 import org.scalatest.FunSuite
 
 class UtilsSuite extends FunSuite {
@@ -136,7 +135,7 @@ class UtilsSuite extends FunSuite {
     // Read some nonexistent bytes on both ends
     assert(Utils.offsetBytes(f1Path, -3, 25) === "1\n2\n3\n4\n5\n6\n7\n8\n9\n")
 
-    FileUtils.deleteDirectory(tmpDir2)
+    Utils.deleteRecursively(tmpDir2)
   }
 
   test("deserialize long value") {