aboutsummaryrefslogtreecommitdiff
path: root/sql
diff options
context:
space:
mode:
authorDavies Liu <davies@databricks.com>2015-07-31 11:07:34 -0700
committerDavies Liu <davies.liu@gmail.com>2015-07-31 11:07:34 -0700
commit0024da9157ba12ec84883a78441fa6835c1d0042 (patch)
treeabff0d88f37acd41c20f127853a21a75c73465bb /sql
parent27ae851ce16082775ffbcb5b8fc6bdbe65dc70fc (diff)
downloadspark-0024da9157ba12ec84883a78441fa6835c1d0042.tar.gz
spark-0024da9157ba12ec84883a78441fa6835c1d0042.tar.bz2
spark-0024da9157ba12ec84883a78441fa6835c1d0042.zip
[SQL] address comments for to_date/trunc
This PR addresses the comments in #7805 cc rxin Author: Davies Liu <davies@databricks.com> Closes #7817 from davies/trunc and squashes the following commits: f729d5f [Davies Liu] rollback cb7f7832 [Davies Liu] genCode() is protected 31e52ef [Davies Liu] fix style ed1edc7 [Davies Liu] address comments for #7805
Diffstat (limited to 'sql')
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala15
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala3
-rw-r--r--sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala4
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/functions.scala3
4 files changed, 14 insertions, 11 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
index 6e7613340c..07dea5b470 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
@@ -726,15 +726,16 @@ case class TruncDate(date: Expression, format: Expression)
override def dataType: DataType = DateType
override def prettyName: String = "trunc"
- lazy val minItemConst = DateTimeUtils.parseTruncLevel(format.eval().asInstanceOf[UTF8String])
+ private lazy val truncLevel: Int =
+ DateTimeUtils.parseTruncLevel(format.eval().asInstanceOf[UTF8String])
override def eval(input: InternalRow): Any = {
- val minItem = if (format.foldable) {
- minItemConst
+ val level = if (format.foldable) {
+ truncLevel
} else {
DateTimeUtils.parseTruncLevel(format.eval().asInstanceOf[UTF8String])
}
- if (minItem == -1) {
+ if (level == -1) {
// unknown format
null
} else {
@@ -742,7 +743,7 @@ case class TruncDate(date: Expression, format: Expression)
if (d == null) {
null
} else {
- DateTimeUtils.truncDate(d.asInstanceOf[Int], minItem)
+ DateTimeUtils.truncDate(d.asInstanceOf[Int], level)
}
}
}
@@ -751,7 +752,7 @@ case class TruncDate(date: Expression, format: Expression)
val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
if (format.foldable) {
- if (minItemConst == -1) {
+ if (truncLevel == -1) {
s"""
boolean ${ev.isNull} = true;
${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
@@ -763,7 +764,7 @@ case class TruncDate(date: Expression, format: Expression)
boolean ${ev.isNull} = ${d.isNull};
${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
- ${ev.primitive} = $dtu.truncDate(${d.primitive}, $minItemConst);
+ ${ev.primitive} = $dtu.truncDate(${d.primitive}, $truncLevel);
}
"""
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index 5a7c25b8d5..032ed8a56a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -794,7 +794,8 @@ object DateTimeUtils {
} else if (level == TRUNC_TO_MONTH) {
d - DateTimeUtils.getDayOfMonth(d) + 1
} else {
- throw new Exception(s"Invalid trunc level: $level")
+ // caller make sure that this should never be reached
+ sys.error(s"Invalid trunc level: $level")
}
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
index 3c05e5c3b8..a41185b4d8 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
@@ -18,11 +18,9 @@
package org.apache.spark.sql.catalyst.expressions
import org.scalactic.TripleEqualsSupport.Spread
-import org.scalatest.Matchers._
import org.apache.spark.SparkFunSuite
-import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.CatalystTypeConverters
+import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.optimizer.DefaultOptimizer
import org.apache.spark.sql.catalyst.plans.logical.{OneRowRelation, Project}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
index 46dc4605a5..5d82a5eadd 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
@@ -2192,6 +2192,9 @@ object functions {
/**
* Returns date truncated to the unit specified by the format.
*
+ * @param format: 'year', 'yyyy', 'yy' for truncate by year,
+ * or 'month', 'mon', 'mm' for truncate by month
+ *
* @group datetime_funcs
* @since 1.5.0
*/