Author:    Sean Owen <sowen@cloudera.com>  2017-04-10 20:11:56 +0100
Committer: Sean Owen <sowen@cloudera.com>  2017-04-10 20:11:56 +0100
commit:    a26e3ed5e414d0a350cfe65dd511b154868b9f1d
tree:      f8bf8feabae7acdd5b2c29e38273fddb80e3de33  /core/src
parent:    fd711ea13e558f0e7d3e01f08e01444d394499a6
[SPARK-20156][CORE][SQL][STREAMING][MLLIB] Java String toLowerCase "Turkish locale bug" causes Spark problems
## What changes were proposed in this pull request?

Add `Locale.ROOT` to internal calls to String `toLowerCase` and `toUpperCase`, to avoid inadvertent locale-sensitive variation in behavior (aka the "Turkish locale problem"). The change looks large, but it is just adding `Locale.ROOT` (the locale with no country or language specified) to every call to these methods.

## How was this patch tested?

Existing tests.

Author: Sean Owen <sowen@cloudera.com>

Closes #17527 from srowen/SPARK-20156.
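For context, a minimal sketch of the failure mode this patch guards against (illustrative only, not code from the patch): Java's locale-sensitive case mapping turns ASCII 'i'/'I' into the Turkish dotted/dotless variants, so case-normalized comparisons against ASCII constants can silently fail when the JVM's default locale is Turkish.

```scala
import java.util.Locale

object TurkishLocaleDemo {
  def main(args: Array[String]): Unit = {
    val turkish = new Locale("tr", "TR")

    // Under a Turkish locale, 'I' lowercases to dotless 'ı' (U+0131) and
    // 'i' uppercases to dotted 'İ' (U+0130), breaking ASCII comparisons.
    println("FIFO".toLowerCase(turkish))     // "fıfo" -- does not equal "fifo"
    println("fifo".toUpperCase(turkish))     // "FİFO" -- does not equal "FIFO"

    // Locale.ROOT pins the mapping, giving the same result on every machine.
    println("FIFO".toLowerCase(Locale.ROOT)) // "fifo"
    println("fifo".toUpperCase(Locale.ROOT)) // "FIFO"
  }
}
```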
Diffstat (limited to 'core/src')
-rw-r--r--  core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java                |  3
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkContext.scala                           |  2
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkEnv.scala                               |  4
-rw-r--r--  core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala  |  3
-rw-r--r--  core/src/main/scala/org/apache/spark/io/CompressionCodec.scala                    |  4
-rw-r--r--  core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala               |  4
-rw-r--r--  core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala                   |  2
-rw-r--r--  core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala              |  6
-rw-r--r--  core/src/main/scala/org/apache/spark/metrics/sink/Slf4jSink.scala                 |  4
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala         |  3
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala           |  5
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala            | 18
-rw-r--r--  core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala              |  4
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/exec/ExecutorThreadDumpPage.scala         |  4
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/jobs/JobPage.scala                        |  4
-rw-r--r--  core/src/test/scala/org/apache/spark/ShuffleSuite.scala                           |  4
-rw-r--r--  core/src/test/scala/org/apache/spark/broadcast/BroadcastSuite.scala               |  4
-rw-r--r--  core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala       |  3
-rw-r--r--  core/src/test/scala/org/apache/spark/storage/BlockManagerReplicationSuite.scala   |  6
-rw-r--r--  core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala                      |  5
-rw-r--r--  core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala                     |  5
-rw-r--r--  core/src/test/scala/org/apache/spark/ui/UISuite.scala                             | 11
22 files changed, 64 insertions(+), 44 deletions(-)
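The hunks that follow all apply the same one-line pattern. A rough sketch of that pattern (the `validModes` set and `parseMode` helper below are hypothetical, standing in for calls such as `SchedulingMode.withName` and `TimeUnit.valueOf` in the patch):

```scala
import java.util.Locale

object LocalePinnedLookup {
  // Hypothetical set of ASCII option names, standing in for Spark's enums/maps.
  private val validModes = Set("FIFO", "FAIR")

  def parseMode(conf: String): String = {
    // Without an explicit locale, "fifo".toUpperCase becomes "FİFO" under
    // tr_TR and the lookup fails; Locale.ROOT makes it deterministic.
    val upper = conf.toUpperCase(Locale.ROOT)
    require(validModes.contains(upper), s"Unrecognized scheduling mode: $conf")
    upper
  }
}
```

For example, `LocalePinnedLookup.parseMode("fifo")` returns `"FIFO"` regardless of the JVM's default locale.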
diff --git a/core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java b/core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java
index b38639e854..dff4f5df68 100644
--- a/core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java
+++ b/core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java
@@ -21,6 +21,7 @@ import org.apache.spark.util.EnumUtil;
import java.util.Collections;
import java.util.HashSet;
+import java.util.Locale;
import java.util.Set;
public enum TaskSorting {
@@ -35,7 +36,7 @@ public enum TaskSorting {
}
public static TaskSorting fromString(String str) {
- String lower = str.toLowerCase();
+ String lower = str.toLowerCase(Locale.ROOT);
for (TaskSorting t: values()) {
if (t.alternateNames.contains(lower)) {
return t;
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 0225fd6056..99efc4893f 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -361,7 +361,7 @@ class SparkContext(config: SparkConf) extends Logging {
*/
def setLogLevel(logLevel: String) {
// let's allow lowercase or mixed case too
- val upperCased = logLevel.toUpperCase(Locale.ENGLISH)
+ val upperCased = logLevel.toUpperCase(Locale.ROOT)
require(SparkContext.VALID_LOG_LEVELS.contains(upperCased),
s"Supplied level $logLevel did not match one of:" +
s" ${SparkContext.VALID_LOG_LEVELS.mkString(",")}")
diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala
index 539dbb55ee..f4a59f069a 100644
--- a/core/src/main/scala/org/apache/spark/SparkEnv.scala
+++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -19,6 +19,7 @@ package org.apache.spark
import java.io.File
import java.net.Socket
+import java.util.Locale
import scala.collection.mutable
import scala.util.Properties
@@ -319,7 +320,8 @@ object SparkEnv extends Logging {
"sort" -> classOf[org.apache.spark.shuffle.sort.SortShuffleManager].getName,
"tungsten-sort" -> classOf[org.apache.spark.shuffle.sort.SortShuffleManager].getName)
val shuffleMgrName = conf.get("spark.shuffle.manager", "sort")
- val shuffleMgrClass = shortShuffleMgrNames.getOrElse(shuffleMgrName.toLowerCase, shuffleMgrName)
+ val shuffleMgrClass =
+ shortShuffleMgrNames.getOrElse(shuffleMgrName.toLowerCase(Locale.ROOT), shuffleMgrName)
val shuffleManager = instantiateClass[ShuffleManager](shuffleMgrClass)
val useLegacyMemoryManager = conf.getBoolean("spark.memory.useLegacyMode", false)
diff --git a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
index ba0096d874..b2b26ee107 100644
--- a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
@@ -19,6 +19,7 @@ package org.apache.spark.executor
import java.net.URL
import java.nio.ByteBuffer
+import java.util.Locale
import java.util.concurrent.atomic.AtomicBoolean
import scala.collection.mutable
@@ -72,7 +73,7 @@ private[spark] class CoarseGrainedExecutorBackend(
def extractLogUrls: Map[String, String] = {
val prefix = "SPARK_LOG_URL_"
sys.env.filterKeys(_.startsWith(prefix))
- .map(e => (e._1.substring(prefix.length).toLowerCase, e._2))
+ .map(e => (e._1.substring(prefix.length).toLowerCase(Locale.ROOT), e._2))
}
override def receive: PartialFunction[Any, Unit] = {
diff --git a/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
index c216fe477f..0cb16f0627 100644
--- a/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
+++ b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
@@ -18,6 +18,7 @@
package org.apache.spark.io
import java.io._
+import java.util.Locale
import com.ning.compress.lzf.{LZFInputStream, LZFOutputStream}
import net.jpountz.lz4.LZ4BlockOutputStream
@@ -66,7 +67,8 @@ private[spark] object CompressionCodec {
}
def createCodec(conf: SparkConf, codecName: String): CompressionCodec = {
- val codecClass = shortCompressionCodecNames.getOrElse(codecName.toLowerCase, codecName)
+ val codecClass =
+ shortCompressionCodecNames.getOrElse(codecName.toLowerCase(Locale.ROOT), codecName)
val codec = try {
val ctor = Utils.classForName(codecClass).getConstructor(classOf[SparkConf])
Some(ctor.newInstance(conf).asInstanceOf[CompressionCodec])
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
index 81b9056b40..fce556fd03 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
@@ -17,7 +17,7 @@
package org.apache.spark.metrics.sink
-import java.util.Properties
+import java.util.{Locale, Properties}
import java.util.concurrent.TimeUnit
import com.codahale.metrics.{ConsoleReporter, MetricRegistry}
@@ -39,7 +39,7 @@ private[spark] class ConsoleSink(val property: Properties, val registry: MetricR
}
val pollUnit: TimeUnit = Option(property.getProperty(CONSOLE_KEY_UNIT)) match {
- case Some(s) => TimeUnit.valueOf(s.toUpperCase())
+ case Some(s) => TimeUnit.valueOf(s.toUpperCase(Locale.ROOT))
case None => TimeUnit.valueOf(CONSOLE_DEFAULT_UNIT)
}
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
index 9d5f2ae932..88bba2fdbd 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
@@ -42,7 +42,7 @@ private[spark] class CsvSink(val property: Properties, val registry: MetricRegis
}
val pollUnit: TimeUnit = Option(property.getProperty(CSV_KEY_UNIT)) match {
- case Some(s) => TimeUnit.valueOf(s.toUpperCase())
+ case Some(s) => TimeUnit.valueOf(s.toUpperCase(Locale.ROOT))
case None => TimeUnit.valueOf(CSV_DEFAULT_UNIT)
}
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
index 22454e50b1..23e31823f4 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
@@ -18,7 +18,7 @@
package org.apache.spark.metrics.sink
import java.net.InetSocketAddress
-import java.util.Properties
+import java.util.{Locale, Properties}
import java.util.concurrent.TimeUnit
import com.codahale.metrics.MetricRegistry
@@ -59,7 +59,7 @@ private[spark] class GraphiteSink(val property: Properties, val registry: Metric
}
val pollUnit: TimeUnit = propertyToOption(GRAPHITE_KEY_UNIT) match {
- case Some(s) => TimeUnit.valueOf(s.toUpperCase())
+ case Some(s) => TimeUnit.valueOf(s.toUpperCase(Locale.ROOT))
case None => TimeUnit.valueOf(GRAPHITE_DEFAULT_UNIT)
}
@@ -67,7 +67,7 @@ private[spark] class GraphiteSink(val property: Properties, val registry: Metric
MetricsSystem.checkMinimalPollingPeriod(pollUnit, pollPeriod)
- val graphite = propertyToOption(GRAPHITE_KEY_PROTOCOL).map(_.toLowerCase) match {
+ val graphite = propertyToOption(GRAPHITE_KEY_PROTOCOL).map(_.toLowerCase(Locale.ROOT)) match {
case Some("udp") => new GraphiteUDP(new InetSocketAddress(host, port))
case Some("tcp") | None => new Graphite(new InetSocketAddress(host, port))
case Some(p) => throw new Exception(s"Invalid Graphite protocol: $p")
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/Slf4jSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/Slf4jSink.scala
index 773e074336..7fa4ba7622 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/Slf4jSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/Slf4jSink.scala
@@ -17,7 +17,7 @@
package org.apache.spark.metrics.sink
-import java.util.Properties
+import java.util.{Locale, Properties}
import java.util.concurrent.TimeUnit
import com.codahale.metrics.{MetricRegistry, Slf4jReporter}
@@ -42,7 +42,7 @@ private[spark] class Slf4jSink(
}
val pollUnit: TimeUnit = Option(property.getProperty(SLF4J_KEY_UNIT)) match {
- case Some(s) => TimeUnit.valueOf(s.toUpperCase())
+ case Some(s) => TimeUnit.valueOf(s.toUpperCase(Locale.ROOT))
case None => TimeUnit.valueOf(SLF4J_DEFAULT_UNIT)
}
diff --git a/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala b/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala
index af9bdefc96..aecb3a980e 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala
@@ -20,6 +20,7 @@ package org.apache.spark.scheduler
import java.io._
import java.net.URI
import java.nio.charset.StandardCharsets
+import java.util.Locale
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
@@ -316,7 +317,7 @@ private[spark] object EventLoggingListener extends Logging {
}
private def sanitize(str: String): String = {
- str.replaceAll("[ :/]", "-").replaceAll("[.${}'\"]", "_").toLowerCase
+ str.replaceAll("[ :/]", "-").replaceAll("[.${}'\"]", "_").toLowerCase(Locale.ROOT)
}
/**
diff --git a/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala b/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala
index 20cedaf060..4171034361 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala
@@ -18,7 +18,7 @@
package org.apache.spark.scheduler
import java.io.{FileInputStream, InputStream}
-import java.util.{NoSuchElementException, Properties}
+import java.util.{Locale, NoSuchElementException, Properties}
import scala.util.control.NonFatal
import scala.xml.{Node, XML}
@@ -142,7 +142,8 @@ private[spark] class FairSchedulableBuilder(val rootPool: Pool, conf: SparkConf)
defaultValue: SchedulingMode,
fileName: String): SchedulingMode = {
- val xmlSchedulingMode = (poolNode \ SCHEDULING_MODE_PROPERTY).text.trim.toUpperCase
+ val xmlSchedulingMode =
+ (poolNode \ SCHEDULING_MODE_PROPERTY).text.trim.toUpperCase(Locale.ROOT)
val warningMessage = s"Unsupported schedulingMode: $xmlSchedulingMode found in " +
s"Fair Scheduler configuration file: $fileName, using " +
s"the default schedulingMode: $defaultValue for pool: $poolName"
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
index 07aea773fa..c849a16023 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
@@ -18,7 +18,7 @@
package org.apache.spark.scheduler
import java.nio.ByteBuffer
-import java.util.{Timer, TimerTask}
+import java.util.{Locale, Timer, TimerTask}
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicLong
@@ -56,8 +56,7 @@ private[spark] class TaskSchedulerImpl private[scheduler](
val maxTaskFailures: Int,
private[scheduler] val blacklistTrackerOpt: Option[BlacklistTracker],
isLocal: Boolean = false)
- extends TaskScheduler with Logging
-{
+ extends TaskScheduler with Logging {
import TaskSchedulerImpl._
@@ -135,12 +134,13 @@ private[spark] class TaskSchedulerImpl private[scheduler](
private var schedulableBuilder: SchedulableBuilder = null
// default scheduler is FIFO
private val schedulingModeConf = conf.get(SCHEDULER_MODE_PROPERTY, SchedulingMode.FIFO.toString)
- val schedulingMode: SchedulingMode = try {
- SchedulingMode.withName(schedulingModeConf.toUpperCase)
- } catch {
- case e: java.util.NoSuchElementException =>
- throw new SparkException(s"Unrecognized $SCHEDULER_MODE_PROPERTY: $schedulingModeConf")
- }
+ val schedulingMode: SchedulingMode =
+ try {
+ SchedulingMode.withName(schedulingModeConf.toUpperCase(Locale.ROOT))
+ } catch {
+ case e: java.util.NoSuchElementException =>
+ throw new SparkException(s"Unrecognized $SCHEDULER_MODE_PROPERTY: $schedulingModeConf")
+ }
val rootPool: Pool = new Pool("", schedulingMode, 0, 0)
diff --git a/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala b/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
index 6fc66e2374..e15166d11c 100644
--- a/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
@@ -19,6 +19,7 @@ package org.apache.spark.serializer
import java.io._
import java.nio.ByteBuffer
+import java.util.Locale
import javax.annotation.Nullable
import scala.collection.JavaConverters._
@@ -244,7 +245,8 @@ class KryoDeserializationStream(
kryo.readClassAndObject(input).asInstanceOf[T]
} catch {
// DeserializationStream uses the EOF exception to indicate stopping condition.
- case e: KryoException if e.getMessage.toLowerCase.contains("buffer underflow") =>
+ case e: KryoException
+ if e.getMessage.toLowerCase(Locale.ROOT).contains("buffer underflow") =>
throw new EOFException
}
}
diff --git a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorThreadDumpPage.scala b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorThreadDumpPage.scala
index dbcc6402bc..6ce3f511e8 100644
--- a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorThreadDumpPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorThreadDumpPage.scala
@@ -17,6 +17,7 @@
package org.apache.spark.ui.exec
+import java.util.Locale
import javax.servlet.http.HttpServletRequest
import scala.xml.{Node, Text}
@@ -42,7 +43,8 @@ private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage
val v1 = if (threadTrace1.threadName.contains("Executor task launch")) 1 else 0
val v2 = if (threadTrace2.threadName.contains("Executor task launch")) 1 else 0
if (v1 == v2) {
- threadTrace1.threadName.toLowerCase < threadTrace2.threadName.toLowerCase
+ threadTrace1.threadName.toLowerCase(Locale.ROOT) <
+ threadTrace2.threadName.toLowerCase(Locale.ROOT)
} else {
v1 > v2
}
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/JobPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/JobPage.scala
index 0ff9e5e941..3131c4a1eb 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/JobPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobPage.scala
@@ -17,7 +17,7 @@
package org.apache.spark.ui.jobs
-import java.util.Date
+import java.util.{Date, Locale}
import javax.servlet.http.HttpServletRequest
import scala.collection.mutable.{Buffer, ListBuffer}
@@ -77,7 +77,7 @@ private[ui] class JobPage(parent: JobsTab) extends WebUIPage("job") {
| 'content': '<div class="job-timeline-content" data-toggle="tooltip"' +
| 'data-placement="top" data-html="true"' +
| 'data-title="${jsEscapedName} (Stage ${stageId}.${attemptId})<br>' +
- | 'Status: ${status.toUpperCase}<br>' +
+ | 'Status: ${status.toUpperCase(Locale.ROOT)}<br>' +
| 'Submitted: ${UIUtils.formatDate(new Date(submissionTime))}' +
| '${
if (status != "running") {
diff --git a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
index e626ed3621..58b865969f 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark
-import java.util.Properties
+import java.util.{Locale, Properties}
import java.util.concurrent.{Callable, CyclicBarrier, Executors, ExecutorService}
import org.scalatest.Matchers
@@ -239,7 +239,7 @@ abstract class ShuffleSuite extends SparkFunSuite with Matchers with LocalSparkC
}
assert(thrown.getClass === classOf[SparkException])
- assert(thrown.getMessage.toLowerCase.contains("serializable"))
+ assert(thrown.getMessage.toLowerCase(Locale.ROOT).contains("serializable"))
}
test("shuffle with different compression settings (SPARK-3426)") {
diff --git a/core/src/test/scala/org/apache/spark/broadcast/BroadcastSuite.scala b/core/src/test/scala/org/apache/spark/broadcast/BroadcastSuite.scala
index 82760fe92f..46f9ac6b02 100644
--- a/core/src/test/scala/org/apache/spark/broadcast/BroadcastSuite.scala
+++ b/core/src/test/scala/org/apache/spark/broadcast/BroadcastSuite.scala
@@ -17,6 +17,8 @@
package org.apache.spark.broadcast
+import java.util.Locale
+
import scala.util.Random
import org.scalatest.Assertions
@@ -130,7 +132,7 @@ class BroadcastSuite extends SparkFunSuite with LocalSparkContext with Encryptio
val thrown = intercept[IllegalStateException] {
sc.broadcast(Seq(1, 2, 3))
}
- assert(thrown.getMessage.toLowerCase.contains("stopped"))
+ assert(thrown.getMessage.toLowerCase(Locale.ROOT).contains("stopped"))
}
test("Forbid broadcasting RDD directly") {
diff --git a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
index e2ba0d2a53..b72cd8be24 100644
--- a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
+++ b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
@@ -17,6 +17,7 @@
package org.apache.spark.internal.config
+import java.util.Locale
import java.util.concurrent.TimeUnit
import org.apache.spark.{SparkConf, SparkFunSuite}
@@ -132,7 +133,7 @@ class ConfigEntrySuite extends SparkFunSuite {
val conf = new SparkConf()
val transformationConf = ConfigBuilder(testKey("transformation"))
.stringConf
- .transform(_.toLowerCase())
+ .transform(_.toLowerCase(Locale.ROOT))
.createWithDefault("FOO")
assert(conf.get(transformationConf) === "foo")
diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerReplicationSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerReplicationSuite.scala
index 13020acdd3..c100803279 100644
--- a/core/src/test/scala/org/apache/spark/storage/BlockManagerReplicationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerReplicationSuite.scala
@@ -17,6 +17,8 @@
package org.apache.spark.storage
+import java.util.Locale
+
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._
import scala.language.implicitConversions
@@ -374,8 +376,8 @@ trait BlockManagerReplicationBehavior extends SparkFunSuite
storageLevels.foreach { storageLevel =>
// Put the block into one of the stores
- val blockId = new TestBlockId(
- "block-with-" + storageLevel.description.replace(" ", "-").toLowerCase)
+ val blockId = TestBlockId(
+ "block-with-" + storageLevel.description.replace(" ", "-").toLowerCase(Locale.ROOT))
val testValue = Array.fill[Byte](blockSize)(1)
stores(0).putSingle(blockId, testValue, storageLevel)
diff --git a/core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala b/core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala
index 38030e0660..499d47b13d 100644
--- a/core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala
@@ -17,6 +17,7 @@
package org.apache.spark.ui
+import java.util.Locale
import javax.servlet.http.HttpServletRequest
import scala.xml.Node
@@ -37,14 +38,14 @@ class StagePageSuite extends SparkFunSuite with LocalSparkContext {
test("peak execution memory should displayed") {
val conf = new SparkConf(false)
- val html = renderStagePage(conf).toString().toLowerCase
+ val html = renderStagePage(conf).toString().toLowerCase(Locale.ROOT)
val targetString = "peak execution memory"
assert(html.contains(targetString))
}
test("SPARK-10543: peak execution memory should be per-task rather than cumulative") {
val conf = new SparkConf(false)
- val html = renderStagePage(conf).toString().toLowerCase
+ val html = renderStagePage(conf).toString().toLowerCase(Locale.ROOT)
// verify min/25/50/75/max show task value not cumulative values
assert(html.contains(s"<td>$peakExecutionMemory.0 b</td>" * 5))
}
diff --git a/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
index f4c561c737..bdd148875e 100644
--- a/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
@@ -18,6 +18,7 @@
package org.apache.spark.ui
import java.net.{HttpURLConnection, URL}
+import java.util.Locale
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
import scala.io.Source
@@ -453,8 +454,8 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
eventually(timeout(10 seconds), interval(50 milliseconds)) {
goToUi(sc, "/jobs")
findAll(cssSelector("tbody tr a")).foreach { link =>
- link.text.toLowerCase should include ("count")
- link.text.toLowerCase should not include "unknown"
+ link.text.toLowerCase(Locale.ROOT) should include ("count")
+ link.text.toLowerCase(Locale.ROOT) should not include "unknown"
}
}
}
diff --git a/core/src/test/scala/org/apache/spark/ui/UISuite.scala b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
index f1be0f6de3..0c3d4caeea 100644
--- a/core/src/test/scala/org/apache/spark/ui/UISuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
@@ -19,6 +19,7 @@ package org.apache.spark.ui
import java.net.{BindException, ServerSocket}
import java.net.{URI, URL}
+import java.util.Locale
import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}
import scala.io.Source
@@ -72,10 +73,10 @@ class UISuite extends SparkFunSuite {
eventually(timeout(10 seconds), interval(50 milliseconds)) {
val html = Source.fromURL(sc.ui.get.webUrl).mkString
assert(!html.contains("random data that should not be present"))
- assert(html.toLowerCase.contains("stages"))
- assert(html.toLowerCase.contains("storage"))
- assert(html.toLowerCase.contains("environment"))
- assert(html.toLowerCase.contains("executors"))
+ assert(html.toLowerCase(Locale.ROOT).contains("stages"))
+ assert(html.toLowerCase(Locale.ROOT).contains("storage"))
+ assert(html.toLowerCase(Locale.ROOT).contains("environment"))
+ assert(html.toLowerCase(Locale.ROOT).contains("executors"))
}
}
}
@@ -85,7 +86,7 @@ class UISuite extends SparkFunSuite {
// test if visible from http://localhost:4040
eventually(timeout(10 seconds), interval(50 milliseconds)) {
val html = Source.fromURL("http://localhost:4040").mkString
- assert(html.toLowerCase.contains("stages"))
+ assert(html.toLowerCase(Locale.ROOT).contains("stages"))
}
}
}