author     Sean Owen <sowen@cloudera.com>  2017-04-10 20:11:56 +0100
committer  Sean Owen <sowen@cloudera.com>  2017-04-10 20:11:56 +0100
commit     a26e3ed5e414d0a350cfe65dd511b154868b9f1d (patch)
tree       f8bf8feabae7acdd5b2c29e38273fddb80e3de33 /sql/catalyst/src/test
parent     fd711ea13e558f0e7d3e01f08e01444d394499a6 (diff)
[SPARK-20156][CORE][SQL][STREAMING][MLLIB] Java String toLowerCase "Turkish locale bug" causes Spark problems
## What changes were proposed in this pull request?

Add `Locale.ROOT` to internal calls to String `toLowerCase` and `toUpperCase`, to avoid inadvertent locale-sensitive variation in behavior (aka the "Turkish locale problem"). The change looks large, but it is just adding `Locale.ROOT` (the locale with no country or language specified) to every call to these methods.

## How was this patch tested?

Existing tests.

Author: Sean Owen <sowen@cloudera.com>

Closes #17527 from srowen/SPARK-20156.
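For readers unfamiliar with the underlying issue, below is a minimal, self-contained sketch (not part of this patch; the object name `TurkishLocaleDemo` is purely illustrative) of how the Turkish default locale breaks ASCII case-insensitive comparisons, and how pinning `Locale.ROOT` avoids it:

```scala
import java.util.Locale

// Illustrative sketch only (not part of this patch) of the
// "Turkish locale problem" that SPARK-20156 guards against.
object TurkishLocaleDemo {
  def main(args: Array[String]): Unit = {
    val turkish = new Locale("tr", "TR")

    // Under the Turkish locale, uppercase 'I' lowercases to the dotless
    // U+0131, so the result no longer contains the ASCII substring "title".
    println("TITLE".toLowerCase(turkish))      // prints "tıtle"
    println("TITLE".toLowerCase(Locale.ROOT))  // prints "title"

    // If the JVM's default locale were tr-TR, a bare toLowerCase() on a
    // string containing 'I' could silently break substring checks like the
    // ones in these test suites; passing Locale.ROOT makes them stable.
    assert("APPEND".toLowerCase(Locale.ROOT).contains("append"))
  }
}
```

Because `Locale.ROOT` has no language or country, the lowered string is identical on every JVM regardless of the default locale, which is why the patch threads it through Spark's internal `toLowerCase`/`toUpperCase` calls.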
Diffstat (limited to 'sql/catalyst/src/test')
-rw-r--r--  sql/catalyst/src/test/java/org/apache/spark/sql/streaming/JavaOutputModeSuite.java                  | 6
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala               | 5
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationsSuite.scala | 7
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala           | 4
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/streaming/InternalOutputModesSuite.scala  | 4
5 files changed, 18 insertions(+), 8 deletions(-)
diff --git a/sql/catalyst/src/test/java/org/apache/spark/sql/streaming/JavaOutputModeSuite.java b/sql/catalyst/src/test/java/org/apache/spark/sql/streaming/JavaOutputModeSuite.java
index e0a54fe30a..d8845e0c83 100644
--- a/sql/catalyst/src/test/java/org/apache/spark/sql/streaming/JavaOutputModeSuite.java
+++ b/sql/catalyst/src/test/java/org/apache/spark/sql/streaming/JavaOutputModeSuite.java
@@ -17,6 +17,8 @@
package org.apache.spark.sql.streaming;
+import java.util.Locale;
+
import org.junit.Test;
public class JavaOutputModeSuite {
@@ -24,8 +26,8 @@ public class JavaOutputModeSuite {
@Test
public void testOutputModes() {
OutputMode o1 = OutputMode.Append();
- assert(o1.toString().toLowerCase().contains("append"));
+ assert(o1.toString().toLowerCase(Locale.ROOT).contains("append"));
OutputMode o2 = OutputMode.Complete();
- assert (o2.toString().toLowerCase().contains("complete"));
+ assert (o2.toString().toLowerCase(Locale.ROOT).contains("complete"));
}
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
index 1be25ec06c..82015b1e06 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
@@ -17,6 +17,8 @@
package org.apache.spark.sql.catalyst.analysis
+import java.util.Locale
+
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.catalog.{InMemoryCatalog, SessionCatalog}
import org.apache.spark.sql.catalyst.plans.PlanTest
@@ -79,7 +81,8 @@ trait AnalysisTest extends PlanTest {
analyzer.checkAnalysis(analyzer.execute(inputPlan))
}
- if (!expectedErrors.map(_.toLowerCase).forall(e.getMessage.toLowerCase.contains)) {
+ if (!expectedErrors.map(_.toLowerCase(Locale.ROOT)).forall(
+ e.getMessage.toLowerCase(Locale.ROOT).contains)) {
fail(
s"""Exception message should contain the following substrings:
|
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationsSuite.scala
index 8f0a0c0d99..c39e372c27 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationsSuite.scala
@@ -17,19 +17,20 @@
package org.apache.spark.sql.catalyst.analysis
+import java.util.Locale
+
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
-import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, Literal, NamedExpression}
+import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, NamedExpression}
import org.apache.spark.sql.catalyst.expressions.aggregate.Count
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical.{FlatMapGroupsWithState, _}
import org.apache.spark.sql.catalyst.streaming.InternalOutputModes._
import org.apache.spark.sql.streaming.OutputMode
import org.apache.spark.sql.types.{IntegerType, LongType, MetadataBuilder}
-import org.apache.spark.unsafe.types.CalendarInterval
/** A dummy command for testing unsupported operations. */
case class DummyCommand() extends Command
@@ -696,7 +697,7 @@ class UnsupportedOperationsSuite extends SparkFunSuite {
testBody
}
expectedMsgs.foreach { m =>
- if (!e.getMessage.toLowerCase.contains(m.toLowerCase)) {
+ if (!e.getMessage.toLowerCase(Locale.ROOT).contains(m.toLowerCase(Locale.ROOT))) {
fail(s"Exception message should contain: '$m', " +
s"actual exception message:\n\t'${e.getMessage}'")
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala
index 7e45028653..13bd363c8b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala
@@ -17,6 +17,8 @@
package org.apache.spark.sql.catalyst.expressions
+import java.util.Locale
+
import org.apache.spark.{SparkException, SparkFunSuite}
import org.apache.spark.sql.types.{IntegerType, StringType}
@@ -32,7 +34,7 @@ class ScalaUDFSuite extends SparkFunSuite with ExpressionEvalHelper {
test("better error message for NPE") {
val udf = ScalaUDF(
- (s: String) => s.toLowerCase,
+ (s: String) => s.toLowerCase(Locale.ROOT),
StringType,
Literal.create(null, StringType) :: Nil)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/streaming/InternalOutputModesSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/streaming/InternalOutputModesSuite.scala
index 201dac35ed..3159b541dc 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/streaming/InternalOutputModesSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/streaming/InternalOutputModesSuite.scala
@@ -17,6 +17,8 @@
package org.apache.spark.sql.catalyst.streaming
+import java.util.Locale
+
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.streaming.OutputMode
@@ -40,7 +42,7 @@ class InternalOutputModesSuite extends SparkFunSuite {
val acceptedModes = Seq("append", "update", "complete")
val e = intercept[IllegalArgumentException](InternalOutputModes(outputMode))
(Seq("output mode", "unknown", outputMode) ++ acceptedModes).foreach { s =>
- assert(e.getMessage.toLowerCase.contains(s.toLowerCase))
+ assert(e.getMessage.toLowerCase(Locale.ROOT).contains(s.toLowerCase(Locale.ROOT)))
}
}
testMode("Xyz")