about | summary | refs | log | tree | commit | diff
path: root/core/src/test
diff options
context:
space:
mode:
author: Sean Owen <sowen@cloudera.com> 2015-04-27 19:50:55 -0400
committer: Sean Owen <sowen@cloudera.com> 2015-04-27 19:50:55 -0400
commit: ab5adb7a973eec9d95c7575c864cba9f8d83a0fd (patch)
tree: ae05e0003efe275028382b16a842660f6e50e43c /core/src/test
parent: 5d45e1f60059e2f2fc8ad64778b9ddcc8887c570 (diff)
downloadspark-ab5adb7a973eec9d95c7575c864cba9f8d83a0fd.tar.gz
spark-ab5adb7a973eec9d95c7575c864cba9f8d83a0fd.tar.bz2
spark-ab5adb7a973eec9d95c7575c864cba9f8d83a0fd.zip
[SPARK-7145] [CORE] commons-lang (2.x) classes used instead of commons-lang3 (3.x); commons-io used without dependency
Remove use of commons-lang in favor of commons-lang3 classes; remove commons-io use in favor of Guava.

Author: Sean Owen <sowen@cloudera.com>

Closes #5703 from srowen/SPARK-7145 and squashes the following commits:

21fbe03 [Sean Owen] Remove use of commons-lang in favor of commons-lang3 classes; remove commons-io use in favor of Guava
Diffstat (limited to 'core/src/test')
-rw-r--r--core/src/test/scala/org/apache/spark/FileServerSuite.scala7
-rw-r--r--core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala4
-rw-r--r--core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala10
3 files changed, 12 insertions, 9 deletions
diff --git a/core/src/test/scala/org/apache/spark/FileServerSuite.scala b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
index a69e9b761f..c0439f9348 100644
--- a/core/src/test/scala/org/apache/spark/FileServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
@@ -22,8 +22,7 @@ import java.net.URI
import java.util.jar.{JarEntry, JarOutputStream}
import javax.net.ssl.SSLException
-import com.google.common.io.ByteStreams
-import org.apache.commons.io.{FileUtils, IOUtils}
+import com.google.common.io.{ByteStreams, Files}
import org.apache.commons.lang3.RandomUtils
import org.scalatest.FunSuite
@@ -239,7 +238,7 @@ class FileServerSuite extends FunSuite with LocalSparkContext {
def fileTransferTest(server: HttpFileServer, sm: SecurityManager = null): Unit = {
val randomContent = RandomUtils.nextBytes(100)
val file = File.createTempFile("FileServerSuite", "sslTests", tmpDir)
- FileUtils.writeByteArrayToFile(file, randomContent)
+ Files.write(randomContent, file)
server.addFile(file)
val uri = new URI(server.serverUri + "/files/" + file.getName)
@@ -254,7 +253,7 @@ class FileServerSuite extends FunSuite with LocalSparkContext {
Utils.setupSecureURLConnection(connection, sm)
}
- val buf = IOUtils.toByteArray(connection.getInputStream)
+ val buf = ByteStreams.toByteArray(connection.getInputStream)
assert(buf === randomContent)
}
diff --git a/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala b/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala
index 190b08d950..ef3e213f1f 100644
--- a/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala
@@ -21,7 +21,7 @@ import java.io.{File, FileWriter, PrintWriter}
import scala.collection.mutable.ArrayBuffer
-import org.apache.commons.lang.math.RandomUtils
+import org.apache.commons.lang3.RandomUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.io.{LongWritable, Text}
@@ -60,7 +60,7 @@ class InputOutputMetricsSuite extends FunSuite with SharedSparkContext
tmpFile = new File(testTempDir, getClass.getSimpleName + ".txt")
val pw = new PrintWriter(new FileWriter(tmpFile))
for (x <- 1 to numRecords) {
- pw.println(RandomUtils.nextInt(numBuckets))
+ pw.println(RandomUtils.nextInt(0, numBuckets))
}
pw.close()
diff --git a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
index 94bfa67451..46d2e5173a 100644
--- a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
+++ b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
@@ -17,14 +17,16 @@
package org.apache.spark.network.netty
+import java.io.InputStreamReader
import java.nio._
+import java.nio.charset.Charset
import java.util.concurrent.TimeUnit
import scala.concurrent.duration._
import scala.concurrent.{Await, Promise}
import scala.util.{Failure, Success, Try}
-import org.apache.commons.io.IOUtils
+import com.google.common.io.CharStreams
import org.apache.spark.network.buffer.{ManagedBuffer, NioManagedBuffer}
import org.apache.spark.network.shuffle.BlockFetchingListener
import org.apache.spark.network.{BlockDataManager, BlockTransferService}
@@ -32,7 +34,7 @@ import org.apache.spark.storage.{BlockId, ShuffleBlockId}
import org.apache.spark.{SecurityManager, SparkConf}
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
-import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite, ShouldMatchers}
+import org.scalatest.{FunSuite, ShouldMatchers}
class NettyBlockTransferSecuritySuite extends FunSuite with MockitoSugar with ShouldMatchers {
test("security default off") {
@@ -113,7 +115,9 @@ class NettyBlockTransferSecuritySuite extends FunSuite with MockitoSugar with Sh
val result = fetchBlock(exec0, exec1, "1", blockId) match {
case Success(buf) =>
- IOUtils.toString(buf.createInputStream()) should equal(blockString)
+ val actualString = CharStreams.toString(
+ new InputStreamReader(buf.createInputStream(), Charset.forName("UTF-8")))
+ actualString should equal(blockString)
buf.release()
Success()
case Failure(t) =>