author     Yin Huai <yhuai@databricks.com>     2016-06-20 20:17:47 +0800
committer  Cheng Lian <lian@databricks.com>    2016-06-20 20:17:47 +0800
commit     905f774b71f4b814d5a2412c7c35bd023c3dfdf8 (patch)
tree       80e2f1fa4c18d0d5625fbc4ff6c9528bab6c4690 /sql/catalyst
parent     6d0f921aedfdd3b7e8472b6776d0c7d8299190bd (diff)
[SPARK-16030][SQL] Allow specifying static partitions when inserting to data source tables
## What changes were proposed in this pull request?

This PR adds static partition support to the INSERT statement when the target table is a data source table.

## How was this patch tested?

New tests in InsertIntoHiveTableSuite and DataSourceAnalysisSuite.

**Note: This PR is based on https://github.com/apache/spark/pull/13766. The last commit is the actual change.**

Author: Yin Huai <yhuai@databricks.com>

Closes #13769 from yhuai/SPARK-16030-1.
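For context, a minimal sketch of the user-facing behavior this change enables, assuming a `SparkSession` named `spark` (as in the shell) and a hypothetical partitioned data source table `sales` with columns (`value`, `part`); the table and column names are illustrative, not taken from this patch:

```scala
// Hypothetical partitioned data source table; names are made up for illustration.
spark.sql(
  "CREATE TABLE sales (value INT, part STRING) USING parquet PARTITIONED BY (part)")

// Static partition: `part` is fixed to a constant in the PARTITION clause,
// so the SELECT only needs to supply the remaining (non-partition) column.
spark.sql("INSERT INTO TABLE sales PARTITION (part = '2016-06') SELECT 1")

// Dynamic partition: no constant value, so the SELECT must also supply `part`.
spark.sql("INSERT INTO TABLE sales PARTITION (part) SELECT 1, '2016-06'")
```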
Diffstat (limited to 'sql/catalyst')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala  19
1 file changed, 19 insertions(+), 0 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index 7b451baaa0..899227674f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -313,6 +313,8 @@ trait CheckAnalysis extends PredicateHelper {
|${s.catalogTable.identifier}
""".stripMargin)
+ // TODO: We need to consolidate this kind of checks for InsertIntoTable
+ // with the rule of PreWriteCheck defined in extendedCheckRules.
case InsertIntoTable(s: SimpleCatalogRelation, _, _, _, _) =>
failAnalysis(
s"""
@@ -320,6 +322,23 @@ trait CheckAnalysis extends PredicateHelper {
|${s.catalogTable.identifier}
""".stripMargin)
+ case InsertIntoTable(t, _, _, _, _)
+ if !t.isInstanceOf[LeafNode] ||
+ t == OneRowRelation ||
+ t.isInstanceOf[LocalRelation] =>
+ failAnalysis(s"Inserting into an RDD-based table is not allowed.")
+
+ case i @ InsertIntoTable(table, partitions, query, _, _) =>
+ val numStaticPartitions = partitions.values.count(_.isDefined)
+ if (table.output.size != (query.output.size + numStaticPartitions)) {
+ failAnalysis(
+ s"$table requires that the data to be inserted have the same number of " +
+ s"columns as the target table: target table has ${table.output.size} " +
+ s"column(s) but the inserted data has " +
+ s"${query.output.size + numStaticPartitions} column(s), including " +
+ s"$numStaticPartitions partition column(s) having constant value(s).")
+ }
+
case o if !o.resolved =>
failAnalysis(
s"unresolved operator ${operator.simpleString}")