diff options
author | Sean Owen <sowen@cloudera.com> | 2014-06-22 11:47:49 -0700 |
---|---|---|
committer | Reynold Xin <rxin@apache.org> | 2014-06-22 11:47:49 -0700 |
commit | 9fe28c35df0dc8ac4a54db2a528fb7ae56d3f978 (patch) | |
tree | 07f019b3f5e3f3f1a49f3bf17d2182b9d4848574 /core | |
parent | 476581e8c8ca03a5940c404fee8a06361ff94cb5 (diff) | |
download | spark-9fe28c35df0dc8ac4a54db2a528fb7ae56d3f978.tar.gz spark-9fe28c35df0dc8ac4a54db2a528fb7ae56d3f978.tar.bz2 spark-9fe28c35df0dc8ac4a54db2a528fb7ae56d3f978.zip |
SPARK-1316. Remove use of Commons IO
Commons IO is actually barely used, and is not a declared dependency. This just replaces its usages with equivalents from the JDK and Guava.
Author: Sean Owen <sowen@cloudera.com>
Closes #1173 from srowen/SPARK-1316 and squashes the following commits:
2eb53db [Sean Owen] Reorder Guava import
8fde404 [Sean Owen] Remove use of Commons IO, which is not actually a dependency
Diffstat (limited to 'core')
-rw-r--r-- | core/src/main/scala/org/apache/spark/util/logging/RollingFileAppender.scala | 6 | ||||
-rw-r--r-- | core/src/test/scala/org/apache/spark/util/FileAppenderSuite.scala | 17 |
2 files changed, 13 insertions, 10 deletions
diff --git a/core/src/main/scala/org/apache/spark/util/logging/RollingFileAppender.scala b/core/src/main/scala/org/apache/spark/util/logging/RollingFileAppender.scala index 1bbbd20cf0..e579421676 100644 --- a/core/src/main/scala/org/apache/spark/util/logging/RollingFileAppender.scala +++ b/core/src/main/scala/org/apache/spark/util/logging/RollingFileAppender.scala @@ -19,7 +19,7 @@ package org.apache.spark.util.logging import java.io.{File, FileFilter, InputStream} -import org.apache.commons.io.FileUtils +import com.google.common.io.Files import org.apache.spark.SparkConf import RollingFileAppender._ @@ -83,7 +83,7 @@ private[spark] class RollingFileAppender( logDebug(s"Attempting to rollover file $activeFile to file $rolloverFile") if (activeFile.exists) { if (!rolloverFile.exists) { - FileUtils.moveFile(activeFile, rolloverFile) + Files.move(activeFile, rolloverFile) logInfo(s"Rolled over $activeFile to $rolloverFile") } else { // In case the rollover file name clashes, make a unique file name. 
@@ -100,7 +100,7 @@ private[spark] class RollingFileAppender( logWarning(s"Rollover file $rolloverFile already exists, " + s"rolled over $activeFile to file $altRolloverFile") - FileUtils.moveFile(activeFile, altRolloverFile) + Files.move(activeFile, altRolloverFile) } } else { logWarning(s"File $activeFile does not exist") diff --git a/core/src/test/scala/org/apache/spark/util/FileAppenderSuite.scala b/core/src/test/scala/org/apache/spark/util/FileAppenderSuite.scala index 02e228945b..ca37d707b0 100644 --- a/core/src/test/scala/org/apache/spark/util/FileAppenderSuite.scala +++ b/core/src/test/scala/org/apache/spark/util/FileAppenderSuite.scala @@ -18,13 +18,16 @@ package org.apache.spark.util import java.io._ +import java.nio.charset.Charset import scala.collection.mutable.HashSet import scala.reflect._ -import org.apache.commons.io.{FileUtils, IOUtils} -import org.apache.spark.{Logging, SparkConf} import org.scalatest.{BeforeAndAfter, FunSuite} + +import com.google.common.io.Files + +import org.apache.spark.{Logging, SparkConf} import org.apache.spark.util.logging.{RollingFileAppender, SizeBasedRollingPolicy, TimeBasedRollingPolicy, FileAppender} class FileAppenderSuite extends FunSuite with BeforeAndAfter with Logging { @@ -41,11 +44,11 @@ class FileAppenderSuite extends FunSuite with BeforeAndAfter with Logging { test("basic file appender") { val testString = (1 to 1000).mkString(", ") - val inputStream = IOUtils.toInputStream(testString) + val inputStream = new ByteArrayInputStream(testString.getBytes(Charset.forName("UTF-8"))) val appender = new FileAppender(inputStream, testFile) inputStream.close() appender.awaitTermination() - assert(FileUtils.readFileToString(testFile) === testString) + assert(Files.toString(testFile, Charset.forName("UTF-8")) === testString) } test("rolling file appender - time-based rolling") { @@ -93,7 +96,7 @@ class FileAppenderSuite extends FunSuite with BeforeAndAfter with Logging { val allGeneratedFiles = new HashSet[String]() val 
items = (1 to 10).map { _.toString * 10000 } for (i <- 0 until items.size) { - testOutputStream.write(items(i).getBytes("UTF8")) + testOutputStream.write(items(i).getBytes(Charset.forName("UTF-8"))) testOutputStream.flush() allGeneratedFiles ++= RollingFileAppender.getSortedRolledOverFiles( testFile.getParentFile.toString, testFile.getName).map(_.toString) @@ -197,7 +200,7 @@ class FileAppenderSuite extends FunSuite with BeforeAndAfter with Logging { // send data to appender through the input stream, and wait for the data to be written val expectedText = textToAppend.mkString("") for (i <- 0 until textToAppend.size) { - outputStream.write(textToAppend(i).getBytes("UTF8")) + outputStream.write(textToAppend(i).getBytes(Charset.forName("UTF-8"))) outputStream.flush() Thread.sleep(sleepTimeBetweenTexts) } @@ -212,7 +215,7 @@ class FileAppenderSuite extends FunSuite with BeforeAndAfter with Logging { logInfo("Filtered files: \n" + generatedFiles.mkString("\n")) assert(generatedFiles.size > 1) val allText = generatedFiles.map { file => - FileUtils.readFileToString(file) + Files.toString(file, Charset.forName("UTF-8")) }.mkString("") assert(allText === expectedText) generatedFiles |