author    Cheng Lian <lian@databricks.com>    2016-04-12 22:28:57 +0800
committer Cheng Lian <lian@databricks.com>    2016-04-12 22:28:57 +0800
commit    124cbfb683a5e959e1b5181d4d0cc56956b50385 (patch)
tree      15fd0307bbd09e7b1f07752f5915a4344861a0fa /sql/hive/src/main/scala/org
parent    b0f5497e9520575e5082fa8ce8be5569f43abe74 (diff)
[SPARK-14488][SPARK-14493][SQL] "CREATE TEMPORARY TABLE ... USING ... AS SELECT" shouldn't create persisted table
## What changes were proposed in this pull request?

When planning the logical plan node `CreateTableUsingAsSelect`, we neglected its `temporary` field and always generated a `CreateMetastoreDataSourceAsSelect`. This PR fixes the issue by generating a `CreateTempTableUsingAsSelect` instead when `temporary` is true.

This PR also fixes SPARK-14493, whose root cause is that `CreateMetastoreDataSourceAsSelect` uses the default Hive warehouse location when the `PATH` data source option is absent.

## How was this patch tested?

Added a test case that creates a temporary table using the target syntax and checks that it is indeed a temporary table.

Author: Cheng Lian <lian@databricks.com>

Closes #12303 from liancheng/spark-14488-fix-ctas-using.
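For context, here is a minimal sketch of the syntax this fix targets, in the `SQLContext`-based API of that era. It is not the test case added by this PR; the object name, table names, and the `path` option value are illustrative assumptions.

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

// Hypothetical standalone sketch of "CREATE TEMPORARY TABLE ... USING ... AS SELECT".
object TempCtasExample {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("temp-ctas").setMaster("local[*]"))
    val sqlContext = new SQLContext(sc)

    // A session-local source table to select from.
    sqlContext.range(10).registerTempTable("src")

    // The target syntax: CTAS with the TEMPORARY keyword and a USING clause.
    sqlContext.sql(
      """CREATE TEMPORARY TABLE dest
        |USING parquet
        |OPTIONS (path '/tmp/dest_parquet')
        |AS SELECT id FROM src
      """.stripMargin)

    // With this fix, `dest` should be visible only as a temporary table in the
    // current session rather than being persisted through the Hive metastore.
    assert(sqlContext.tableNames().contains("dest"))

    sc.stop()
  }
}
```

Before this change, the `TEMPORARY` keyword was effectively ignored during planning, so a statement like the one above would have created a persisted metastore table.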
Diffstat (limited to 'sql/hive/src/main/scala/org')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala | 10
1 file changed, 7 insertions(+), 3 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
index f44937ec6f..010361a32e 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
@@ -23,9 +23,8 @@ import org.apache.spark.sql.catalyst.planning._
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution._
-import org.apache.spark.sql.execution.command.{DescribeCommand => RunnableDescribeCommand, _}
-import org.apache.spark.sql.execution.datasources.{CreateTableUsing, CreateTableUsingAsSelect,
- DescribeCommand}
+import org.apache.spark.sql.execution.command.{DescribeCommand => _, _}
+import org.apache.spark.sql.execution.datasources.{CreateTableUsing, CreateTableUsingAsSelect, CreateTempTableUsingAsSelect, DescribeCommand}
import org.apache.spark.sql.hive.execution._
private[hive] trait HiveStrategies {
@@ -90,6 +89,11 @@ private[hive] trait HiveStrategies {
tableIdent, userSpecifiedSchema, provider, opts, allowExisting, managedIfNoPath)
ExecutedCommand(cmd) :: Nil
+ case c: CreateTableUsingAsSelect if c.temporary =>
+ val cmd = CreateTempTableUsingAsSelect(
+ c.tableIdent, c.provider, c.partitionColumns, c.mode, c.options, c.child)
+ ExecutedCommand(cmd) :: Nil
+
case c: CreateTableUsingAsSelect =>
val cmd = CreateMetastoreDataSourceAsSelect(c.tableIdent, c.provider, c.partitionColumns,
c.bucketSpec, c.mode, c.options, c.child)
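One detail worth noting in the planner change above: the guarded `case c: CreateTableUsingAsSelect if c.temporary` is inserted before the unguarded `CreateTableUsingAsSelect` case, and that ordering matters because Scala tries match cases top to bottom. The following self-contained sketch uses simplified stand-in types (not Spark's real planner classes) purely to illustrate that dispatch:

```scala
// Simplified stand-ins for the logical plan node and the two physical commands.
object MatchOrderSketch extends App {
  sealed trait Plan
  final case class CreateTableUsingAsSelect(temporary: Boolean) extends Plan

  // Cases are tried in order: the guard intercepts temporary-table plans before
  // the unguarded (metastore) case can claim them.
  def planName(p: Plan): String = p match {
    case c: CreateTableUsingAsSelect if c.temporary => "CreateTempTableUsingAsSelect"
    case _: CreateTableUsingAsSelect                => "CreateMetastoreDataSourceAsSelect"
  }

  assert(planName(CreateTableUsingAsSelect(temporary = true)) == "CreateTempTableUsingAsSelect")
  assert(planName(CreateTableUsingAsSelect(temporary = false)) == "CreateMetastoreDataSourceAsSelect")
}
```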