path: root/sql/core
author     Wenchen Fan <cloud0fan@outlook.com>  2015-07-08 10:51:32 -0700
committer  Reynold Xin <rxin@databricks.com>    2015-07-08 10:51:32 -0700
commit     0ba98c04c726a827df8cb19b0db17c352a647960 (patch)
tree       5d12dde981c3abe0fde63702e60fa2c4693f6ab0 /sql/core
parent     74335b31072951244967f878d8b766cd1bfc2ac6 (diff)
[SPARK-8753][SQL] Create an IntervalType data type
We need a new data type to represent time intervals. Because we cannot determine how many days are in a month, an interval needs two values: an int `months` and a long `microseconds`. The interval literal syntax looks like: `interval 3 years -4 month 4 weeks 3 second`. Because `TimestampType` values are stored as a number of 100ns units, it may not make sense to support a nanosecond unit.

Author: Wenchen Fan <cloud0fan@outlook.com>

Closes #7226 from cloud-fan/interval and squashes the following commits:

632062d [Wenchen Fan] address comments
ac348c3 [Wenchen Fan] use case class
0342d2e [Wenchen Fan] use array byte
df9256c [Wenchen Fan] fix style
fd6f18a [Wenchen Fan] address comments
1856af3 [Wenchen Fan] support interval type
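To make the representation concrete, here is a minimal sketch (not code from this commit) that decomposes the example literal above into the two stored values; the actual type added here is org.apache.spark.unsafe.types.Interval, constructed from (months: Int, microseconds: Long) as the test below shows:

    // Sketch only: decomposing "interval 3 years -4 month 4 weeks 3 second"
    // into the (months, microseconds) pair described above.
    val months: Int = 3 * 12 - 4                     // 3 years -4 month => 32 months
    val microsPerSecond: Long = 1000L * 1000
    val microseconds: Long =
      4L * 7 * 24 * 3600 * microsPerSecond +         // 4 weeks
      3 * microsPerSecond                            // 3 seconds
    // => new Interval(32, 2419203000000L)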
Diffstat (limited to 'sql/core')
 sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala   |  3 +++
 sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala | 25 +++++++++++++++++++++++++
 2 files changed, 28 insertions(+), 0 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
index 1f0b93e285..d7440c55bd 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
@@ -304,6 +304,9 @@ private[sql] object ResolvedDataSource {
mode: SaveMode,
options: Map[String, String],
data: DataFrame): ResolvedDataSource = {
+    if (data.schema.map(_.dataType).exists(_.isInstanceOf[IntervalType])) {
+      throw new AnalysisException("Cannot save interval data type into external storage.")
+    }
val clazz: Class[_] = lookupDataSource(provider)
val relation = clazz.newInstance() match {
case dataSource: CreatableRelationProvider =>
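For illustration, the guard above means any attempt to write a DataFrame that has an interval column now fails fast. A usage sketch (the output path is hypothetical, not part of this patch):

    // Usage sketch: writing a DataFrame with an interval column now fails fast.
    val df = sqlContext.sql("select interval 1 month")
    df.write.json("/tmp/intervals")  // AnalysisException:
                                     // "Cannot save interval data type into external storage."

Note that the check walks data.schema.map(_.dataType), i.e. top-level column types only.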
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 12ad019e8b..231440892b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -1467,4 +1467,29 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll with SQLTestUtils {
checkAnswer(sql("select count, sort from t"), Row(1, "a"))
}
}
+
+ test("SPARK-8753: add interval type") {
+ import org.apache.spark.unsafe.types.Interval
+
+ val df = sql("select interval 3 years -3 month 7 week 123 microseconds")
+ checkAnswer(df, Row(new Interval(12 * 3 - 3, 7L * 1000 * 1000 * 3600 * 24 * 7 + 123 )))
+ withTempPath(f => {
+ // Currently we don't yet support saving out values of interval data type.
+ val e = intercept[AnalysisException] {
+ df.write.json(f.getCanonicalPath)
+ }
+ e.message.contains("Cannot save interval data type into external storage")
+ })
+
+ def checkIntervalParseError(s: String): Unit = {
+ val e = intercept[AnalysisException] {
+ sql(s)
+ }
+ e.message.contains("at least one time unit should be given for interval literal")
+ }
+
+ checkIntervalParseError("select interval")
+ // Currently we don't yet support nanosecond
+ checkIntervalParseError("select interval 23 nanosecond")
+ }
}
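For reference, the expected value in the checkAnswer call above works out as follows (a sketch; the arithmetic mirrors the test's own expression):

    // interval 3 years -3 month 7 week 123 microseconds
    val months = 12 * 3 - 3                              // 33 months
    val micros = 7L * 1000 * 1000 * 3600 * 24 * 7 + 123  // 7 weeks + 123 us
    // => new Interval(33, 4233600000123L)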