author     Sean Owen <sowen@cloudera.com>    2016-11-02 09:39:15 +0000
committer  Sean Owen <sowen@cloudera.com>    2016-11-02 09:39:15 +0000
commit     9c8deef64efee20a0ddc9b612f90e77c80aede60 (patch)
tree       feb6a7eeb4e10f628ff1227787c1ee430cb6195b /sql/core/src/test
parent     70a5db7bbd192a4bc68bcfdc475ab221adf2fcdd (diff)
[SPARK-18076][CORE][SQL] Fix default Locale used in DateFormat, NumberFormat to Locale.US
## What changes were proposed in this pull request?

Fix `Locale.US` for all usages of `DateFormat`, `NumberFormat`

## How was this patch tested?

Existing tests.

Author: Sean Owen <sowen@cloudera.com>

Closes #15610 from srowen/SPARK-18076.
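Both APIs named in the title are locale-sensitive: the single-argument `new SimpleDateFormat(pattern)` and a default `NumberFormat` pick up the JVM's default locale, so a test that passes on an en-US machine can fail on, say, an fr-FR one. A minimal standalone sketch of the difference (illustrative only, not part of this patch; the object name is made up):

```scala
import java.text.{NumberFormat, SimpleDateFormat}
import java.util.{Date, Locale}

object LocaleSensitivityDemo {
  def main(args: Array[String]): Unit = {
    // Number parsing: "1,00" is 100 under en-US rules (',' is a grouping
    // separator) but 1.0 under fr-FR (',' is the decimal separator).
    println(NumberFormat.getInstance(Locale.US).parse("1,00"))     // 100
    println(NumberFormat.getInstance(Locale.FRANCE).parse("1,00")) // 1.0

    // Date formatting: textual fields such as month abbreviations follow
    // the locale, which is why the tests now pass Locale.US explicitly.
    val d = new Date(0L) // epoch; rendered in the JVM's default time zone
    println(new SimpleDateFormat("dd MMM yyyy", Locale.US).format(d))     // e.g. 01 Jan 1970
    println(new SimpleDateFormat("dd MMM yyyy", Locale.FRANCE).format(d)) // e.g. 01 janv. 1970
  }
}
```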
Diffstat (limited to 'sql/core/src/test')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala                          | 11
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala          |  9
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVTypeCastSuite.scala  |  9
3 files changed, 15 insertions(+), 14 deletions(-)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
index f7aa3b747a..e05b2252ee 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql
import java.sql.{Date, Timestamp}
import java.text.SimpleDateFormat
+import java.util.Locale
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.functions._
@@ -55,8 +56,8 @@ class DateFunctionsSuite extends QueryTest with SharedSQLContext {
checkAnswer(sql("""SELECT CURRENT_TIMESTAMP() = NOW()"""), Row(true))
}
- val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
- val sdfDate = new SimpleDateFormat("yyyy-MM-dd")
+ val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US)
+ val sdfDate = new SimpleDateFormat("yyyy-MM-dd", Locale.US)
val d = new Date(sdf.parse("2015-04-08 13:10:15").getTime)
val ts = new Timestamp(sdf.parse("2013-04-08 13:10:15").getTime)
@@ -395,11 +396,11 @@ class DateFunctionsSuite extends QueryTest with SharedSQLContext {
}
test("from_unixtime") {
- val sdf1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
+ val sdf1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US)
val fmt2 = "yyyy-MM-dd HH:mm:ss.SSS"
- val sdf2 = new SimpleDateFormat(fmt2)
+ val sdf2 = new SimpleDateFormat(fmt2, Locale.US)
val fmt3 = "yy-MM-dd HH-mm-ss"
- val sdf3 = new SimpleDateFormat(fmt3)
+ val sdf3 = new SimpleDateFormat(fmt3, Locale.US)
val df = Seq((1000, "yyyy-MM-dd HH:mm:ss.SSS"), (-1000, "yy-MM-dd HH-mm-ss")).toDF("a", "b")
checkAnswer(
df.select(from_unixtime(col("a"))),
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
index f7c22c6c93..8209b5bd7f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
@@ -21,6 +21,7 @@ import java.io.File
import java.nio.charset.UnsupportedCharsetException
import java.sql.{Date, Timestamp}
import java.text.SimpleDateFormat
+import java.util.Locale
import org.apache.commons.lang3.time.FastDateFormat
import org.apache.hadoop.io.SequenceFile.CompressionType
@@ -487,7 +488,7 @@ class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils {
.select("date")
.collect()
- val dateFormat = new SimpleDateFormat("dd/MM/yyyy HH:mm")
+ val dateFormat = new SimpleDateFormat("dd/MM/yyyy HH:mm", Locale.US)
val expected =
Seq(Seq(new Timestamp(dateFormat.parse("26/08/2015 18:00").getTime)),
Seq(new Timestamp(dateFormat.parse("27/10/2014 18:30").getTime)),
@@ -509,7 +510,7 @@ class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils {
.select("date")
.collect()
- val dateFormat = new SimpleDateFormat("dd/MM/yyyy hh:mm")
+ val dateFormat = new SimpleDateFormat("dd/MM/yyyy hh:mm", Locale.US)
val expected = Seq(
new Date(dateFormat.parse("26/08/2015 18:00").getTime),
new Date(dateFormat.parse("27/10/2014 18:30").getTime),
@@ -728,7 +729,7 @@ class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils {
.option("inferSchema", "false")
.load(iso8601timestampsPath)
- val iso8501 = FastDateFormat.getInstance("yyyy-MM-dd'T'HH:mm:ss.SSSZZ")
+ val iso8501 = FastDateFormat.getInstance("yyyy-MM-dd'T'HH:mm:ss.SSSZZ", Locale.US)
val expectedTimestamps = timestamps.collect().map { r =>
// This should be ISO8601 formatted string.
Row(iso8501.format(r.toSeq.head))
@@ -761,7 +762,7 @@ class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils {
.option("inferSchema", "false")
.load(iso8601datesPath)
- val iso8501 = FastDateFormat.getInstance("yyyy-MM-dd")
+ val iso8501 = FastDateFormat.getInstance("yyyy-MM-dd", Locale.US)
val expectedDates = dates.collect().map { r =>
// This should be ISO8601 formatted string.
Row(iso8501.format(r.toSeq.head))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVTypeCastSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVTypeCastSuite.scala
index 51832a13cf..c74406b9cb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVTypeCastSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVTypeCastSuite.scala
@@ -144,13 +144,12 @@ class CSVTypeCastSuite extends SparkFunSuite {
DateTimeUtils.millisToDays(DateTimeUtils.stringToTime("2015-01-01").getTime))
}
- test("Float and Double Types are cast correctly with Locale") {
+ test("Float and Double Types are cast without respect to platform default Locale") {
val originalLocale = Locale.getDefault
try {
- val locale : Locale = new Locale("fr", "FR")
- Locale.setDefault(locale)
- assert(CSVTypeCast.castTo("1,00", FloatType) == 1.0)
- assert(CSVTypeCast.castTo("1,00", DoubleType) == 1.0)
+ Locale.setDefault(new Locale("fr", "FR"))
+ assert(CSVTypeCast.castTo("1,00", FloatType) == 100.0) // Would parse as 1.0 in fr-FR
+ assert(CSVTypeCast.castTo("1,00", DoubleType) == 100.0)
} finally {
Locale.setDefault(originalLocale)
}
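For the CSVTypeCastSuite change above, the new expected values follow from the same grouping-separator rule: with the parse locale fixed to `Locale.US`, `"1,00"` is read as one hundred rather than one, even when the JVM default is fr-FR. A quick REPL-style check of the underlying `NumberFormat` behaviour (assuming the cast falls back to `NumberFormat` for strings that are not plain numbers, which this diff does not itself show):

```scala
import java.text.NumberFormat
import java.util.Locale

// ',' is a grouping separator in en-US, so "1,00" parses to 100 ...
assert(NumberFormat.getInstance(Locale.US).parse("1,00").doubleValue() == 100.0)
// ... but it is the decimal separator in fr-FR, where the same string parses to 1.0.
assert(NumberFormat.getInstance(Locale.FRANCE).parse("1,00").doubleValue() == 1.0)
```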