about summary refs log tree commit diff
path: root/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
diff options
context:
space:
mode:
Diffstat (limited to 'core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala')
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala  9
1 file changed, 5 insertions, 4 deletions
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
index 8e8007f4eb..5fd599e190 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
@@ -20,13 +20,13 @@ package org.apache.spark.deploy.history
import java.io.{BufferedOutputStream, ByteArrayInputStream, ByteArrayOutputStream, File,
FileOutputStream, OutputStreamWriter}
import java.net.URI
+import java.nio.charset.StandardCharsets
import java.util.concurrent.TimeUnit
import java.util.zip.{ZipInputStream, ZipOutputStream}
import scala.concurrent.duration._
import scala.language.postfixOps
-import com.google.common.base.Charsets
import com.google.common.io.{ByteStreams, Files}
import org.apache.hadoop.hdfs.DistributedFileSystem
import org.json4s.jackson.JsonMethods._
@@ -320,8 +320,9 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
var entry = inputStream.getNextEntry
entry should not be null
while (entry != null) {
- val actual = new String(ByteStreams.toByteArray(inputStream), Charsets.UTF_8)
- val expected = Files.toString(logs.find(_.getName == entry.getName).get, Charsets.UTF_8)
+ val actual = new String(ByteStreams.toByteArray(inputStream), StandardCharsets.UTF_8)
+ val expected =
+ Files.toString(logs.find(_.getName == entry.getName).get, StandardCharsets.UTF_8)
actual should be (expected)
totalEntries += 1
entry = inputStream.getNextEntry
@@ -415,7 +416,7 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
if (isNewFormat) {
EventLoggingListener.initEventLog(new FileOutputStream(file))
}
- val writer = new OutputStreamWriter(bstream, "UTF-8")
+ val writer = new OutputStreamWriter(bstream, StandardCharsets.UTF_8)
Utils.tryWithSafeFinally {
events.foreach(e => writer.write(compact(render(JsonProtocol.sparkEventToJson(e))) + "\n"))
} {