aboutsummaryrefslogtreecommitdiff
path: root/sql/catalyst/src
diff options
context:
space:
mode:
authorpetermaxlee <petermaxlee@gmail.com>2016-08-20 13:19:38 +0800
committerWenchen Fan <wenchen@databricks.com>2016-08-20 13:19:38 +0800
commit45d40d9f66c666eec6df926db23937589d67225d (patch)
tree30ca4c1958edf9dacea767e4a7ca6bfa48a74d77 /sql/catalyst/src
parentba1737c21aab91ff3f1a1737aa2d6b07575e36a3 (diff)
downloadspark-45d40d9f66c666eec6df926db23937589d67225d.tar.gz
spark-45d40d9f66c666eec6df926db23937589d67225d.tar.bz2
spark-45d40d9f66c666eec6df926db23937589d67225d.zip
[SPARK-17150][SQL] Support SQL generation for inline tables
## What changes were proposed in this pull request?

This patch adds support for SQL generation for inline tables. With this, it would be possible to create a view that depends on inline tables.

## How was this patch tested?

Added a test case in LogicalPlanToSQLSuite.

Author: petermaxlee <petermaxlee@gmail.com>

Closes #14709 from petermaxlee/SPARK-17150.
Diffstat (limited to 'sql/catalyst/src')
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LocalRelation.scala17
1 file changed, 15 insertions, 2 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LocalRelation.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LocalRelation.scala
index 9d64f35efc..890865d177 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LocalRelation.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LocalRelation.scala
@@ -18,8 +18,9 @@
package org.apache.spark.sql.catalyst.plans.logical
import org.apache.spark.sql.Row
-import org.apache.spark.sql.catalyst.{analysis, CatalystTypeConverters, InternalRow}
-import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
+import org.apache.spark.sql.catalyst.analysis
+import org.apache.spark.sql.catalyst.expressions.{Attribute, Literal}
import org.apache.spark.sql.types.{StructField, StructType}
object LocalRelation {
@@ -75,4 +76,16 @@ case class LocalRelation(output: Seq[Attribute], data: Seq[InternalRow] = Nil)
override lazy val statistics =
Statistics(sizeInBytes = output.map(_.dataType.defaultSize).sum * data.length)
+
+ def toSQL(inlineTableName: String): String = {
+ require(data.nonEmpty)
+ val types = output.map(_.dataType)
+ val rows = data.map { row =>
+ val cells = row.toSeq(types).zip(types).map { case (v, tpe) => Literal(v, tpe).sql }
+ cells.mkString("(", ", ", ")")
+ }
+ "VALUES " + rows.mkString(", ") +
+ " AS " + inlineTableName +
+ output.map(_.name).mkString("(", ", ", ")")
+ }
}