about summary refs log tree commit diff
path: root/sql
diff options
context:
space:
mode:
authorJosh Rosen <joshrosen@databricks.com>2015-05-17 11:59:28 -0700
committerReynold Xin <rxin@databricks.com>2015-05-17 11:59:28 -0700
commit564562874f589c4c8bcabcd9d6eb9a6b0eada938 (patch)
tree11d7abdbe268d282ac2a5694e87b2515373c63ea /sql
parentf2cc6b5bccc3a70fd7d69183b1a068800831fe19 (diff)
downloadspark-564562874f589c4c8bcabcd9d6eb9a6b0eada938.tar.gz
spark-564562874f589c4c8bcabcd9d6eb9a6b0eada938.tar.bz2
spark-564562874f589c4c8bcabcd9d6eb9a6b0eada938.zip
[SPARK-7686] [SQL] DescribeCommand is assigned wrong output attributes in SparkStrategies
In `SparkStrategies`, `RunnableDescribeCommand` is called with the output attributes of the table being described rather than the attributes for the `describe` command's output. I discovered this issue because it caused type conversion errors in some UnsafeRow conversion code that I'm writing. Author: Josh Rosen <joshrosen@databricks.com> Closes #6217 from JoshRosen/SPARK-7686 and squashes the following commits: 953a344 [Josh Rosen] Fix SPARK-7686 with a simple change in SparkStrategies. a4eec9f [Josh Rosen] Add failing regression test for SPARK-7686
Diffstat (limited to 'sql')
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala | 4
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala | 6
2 files changed, 8 insertions(+), 2 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
index af0029cb84..3f6a0345bc 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
@@ -354,10 +354,10 @@ private[sql] abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
case c: CreateTableUsingAsSelect if !c.temporary =>
sys.error("Tables created with SQLContext must be TEMPORARY. Use a HiveContext instead.")
- case LogicalDescribeCommand(table, isExtended) =>
+ case describe @ LogicalDescribeCommand(table, isExtended) =>
val resultPlan = self.sqlContext.executePlan(table).executedPlan
ExecutedCommand(
- RunnableDescribeCommand(resultPlan, resultPlan.output, isExtended)) :: Nil
+ RunnableDescribeCommand(resultPlan, describe.output, isExtended)) :: Nil
case _ => Nil
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala
index 6664e8d64c..f5106f67a0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala
@@ -99,4 +99,10 @@ class DDLTestSuite extends DataSourceTest {
Row("arrayType", "array<string>", ""),
Row("structType", "struct<f1:string,f2:int>", "")
))
+
+ test("SPARK-7686 DescribeCommand should have correct physical plan output attributes") {
+ val attributes = sql("describe ddlPeople").queryExecution.executedPlan.output
+ assert(attributes.map(_.name) === Seq("col_name", "data_type", "comment"))
+ assert(attributes.map(_.dataType).toSet === Set(StringType))
+ }
}