author    Wenchen Fan <wenchen@databricks.com>  2015-11-16 08:58:40 -0800
committer Davies Liu <davies.liu@gmail.com>  2015-11-16 08:58:40 -0800
commit    06f1fdba6d1425afddfc1d45a20dbe9bede15e7a (patch)
tree      0febbf696c1d3492fc29285e2438d3cb7696cb41 /sql
parent    0e79604aed116bdcb40e03553a2d103b5b1cdbae (diff)
[SPARK-11752] [SQL] fix timezone problem for DateTimeUtils.getSeconds
Code snippet to reproduce it:

```
TimeZone.setDefault(TimeZone.getTimeZone("Asia/Shanghai"))
val t = Timestamp.valueOf("1900-06-11 12:14:50.789")
val us = fromJavaTimestamp(t)
assert(getSeconds(us) === t.getSeconds)
```

It would be good to add a regression test for this, but the reproducing code needs to change the default timezone, and even if we change it back afterwards, the `lazy val defaultTimeZone` in `DateTimeUtils` stays fixed at whatever zone it was first initialized with.

Author: Wenchen Fan <wenchen@databricks.com>

Closes #9728 from cloud-fan/seconds.
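The caveat above is worth spelling out: a test would have to install a different default timezone before anything touches `DateTimeUtils`. A sketch of a hypothetical helper (`withDefaultTimeZone` is not part of this patch) that swaps and restores the JVM default around a block:

```
import java.util.TimeZone

object TimeZoneTestUtils {
  // Hypothetical helper, not part of this patch: runs `block` with `tz` as
  // the JVM default timezone and restores the previous default afterwards.
  // As the commit message notes, this only helps if it runs before
  // DateTimeUtils.defaultTimeZone (a lazy val) is first initialized.
  def withDefaultTimeZone[T](tz: TimeZone)(block: => T): T = {
    val original = TimeZone.getDefault
    TimeZone.setDefault(tz)
    try block finally TimeZone.setDefault(original)
  }
}
```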
Diffstat (limited to 'sql')
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala      | 14
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala |  2
2 files changed, 9 insertions(+), 7 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index deff8a5378..8fb3f41f1b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -395,16 +395,19 @@ object DateTimeUtils {
/**
* Returns the microseconds since year zero (-17999) from microseconds since epoch.
*/
- def absoluteMicroSecond(microsec: SQLTimestamp): SQLTimestamp = {
+ private def absoluteMicroSecond(microsec: SQLTimestamp): SQLTimestamp = {
microsec + toYearZero * MICROS_PER_DAY
}
+ private def localTimestamp(microsec: SQLTimestamp): SQLTimestamp = {
+ absoluteMicroSecond(microsec) + defaultTimeZone.getOffset(microsec / 1000) * 1000L
+ }
+
/**
* Returns the hour value of a given timestamp value. The timestamp is expressed in microseconds.
*/
def getHours(microsec: SQLTimestamp): Int = {
- val localTs = absoluteMicroSecond(microsec) + defaultTimeZone.getOffset(microsec / 1000) * 1000L
- ((localTs / MICROS_PER_SECOND / 3600) % 24).toInt
+ ((localTimestamp(microsec) / MICROS_PER_SECOND / 3600) % 24).toInt
}
/**
@@ -412,8 +415,7 @@
* Returns the minute value of a given timestamp value. The timestamp is expressed in
* microseconds.
*/
def getMinutes(microsec: SQLTimestamp): Int = {
- val localTs = absoluteMicroSecond(microsec) + defaultTimeZone.getOffset(microsec / 1000) * 1000L
- ((localTs / MICROS_PER_SECOND / 60) % 60).toInt
+ ((localTimestamp(microsec) / MICROS_PER_SECOND / 60) % 60).toInt
}
/**
@@ -421,7 +423,7 @@
* Returns the second value of a given timestamp value. The timestamp is expressed in
* microseconds.
*/
def getSeconds(microsec: SQLTimestamp): Int = {
- ((absoluteMicroSecond(microsec) / MICROS_PER_SECOND) % 60).toInt
+ ((localTimestamp(microsec) / MICROS_PER_SECOND) % 60).toInt
}
private[this] def isLeapYear(year: Int): Boolean = {
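The substance of the fix is visible in the hunks above: `getHours` and `getMinutes` already shifted the timestamp by the default timezone's offset, but `getSeconds` divided `absoluteMicroSecond` directly, with no offset applied. For zones whose offset is a whole number of minutes the difference vanishes modulo 60, which is why the bug only surfaces with historical offsets such as Asia/Shanghai's pre-1901 local mean time (UTC+8:05:43 in common tz data). The new `localTimestamp` helper routes all three accessors through the same offset-adjusted value. A minimal standalone sketch of the offending offset, assuming that tz data and not part of the patch:

```
import java.util.{Calendar, TimeZone}

object OffsetDemo {
  def main(args: Array[String]): Unit = {
    // Build the instant 1900-06-11 00:00:00 UTC without string parsing.
    val cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"))
    cal.clear()
    cal.set(1900, Calendar.JUNE, 11)

    // The zone offset at that instant has a non-zero seconds component, so
    // dropping it (as the old getSeconds did) shifts the seconds field.
    val offsetSec = TimeZone.getTimeZone("Asia/Shanghai").getOffset(cal.getTimeInMillis) / 1000
    println(offsetSec % 60) // expected: 43
  }
}
```

An offset of 8 hours, 5 minutes and 43 seconds moves the local seconds field 43 away from the UTC one, which matches the failing assertion in the commit message's reproduction snippet.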
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index 64d15e6b91..60d45422bc 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -358,7 +358,7 @@ class DateTimeUtilsSuite extends SparkFunSuite {
assert(getSeconds(c.getTimeInMillis * 1000) === 9)
}
- test("hours / miniute / seconds") {
+ test("hours / minutes / seconds") {
Seq(Timestamp.valueOf("2015-06-11 10:12:35.789"),
Timestamp.valueOf("2015-06-11 20:13:40.789"),
Timestamp.valueOf("1900-06-11 12:14:50.789"),