 sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala          | 10
 sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala | 49
 2 files changed, 53 insertions(+), 6 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
index f44937ec6f..010361a32e 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
@@ -23,9 +23,8 @@ import org.apache.spark.sql.catalyst.planning._
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution._
-import org.apache.spark.sql.execution.command.{DescribeCommand => RunnableDescribeCommand, _}
-import org.apache.spark.sql.execution.datasources.{CreateTableUsing, CreateTableUsingAsSelect,
- DescribeCommand}
+import org.apache.spark.sql.execution.command.{DescribeCommand => _, _}
+import org.apache.spark.sql.execution.datasources.{CreateTableUsing, CreateTableUsingAsSelect, CreateTempTableUsingAsSelect, DescribeCommand}
import org.apache.spark.sql.hive.execution._

private[hive] trait HiveStrategies {
@@ -90,6 +89,11 @@ private[hive] trait HiveStrategies {
tableIdent, userSpecifiedSchema, provider, opts, allowExisting, managedIfNoPath)
ExecutedCommand(cmd) :: Nil

+ case c: CreateTableUsingAsSelect if c.temporary =>
+ val cmd = CreateTempTableUsingAsSelect(
+ c.tableIdent, c.provider, c.partitionColumns, c.mode, c.options, c.child)
+ ExecutedCommand(cmd) :: Nil
+
case c: CreateTableUsingAsSelect =>
val cmd = CreateMetastoreDataSourceAsSelect(c.tableIdent, c.provider, c.partitionColumns,
c.bucketSpec, c.mode, c.options, c.child)
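
In effect, the guarded `if c.temporary` case above routes a temporary CTAS to CreateTempTableUsingAsSelect, while the unguarded case that follows keeps planning non-temporary CTAS through the metastore-backed CreateMetastoreDataSourceAsSelect. As a rough sketch of the statement shape that now takes the new branch (mirroring the test added below; the table names and output path are illustrative only, and a HiveContext is assumed to be available as sqlContext, as in spark-shell of that era):

    // Illustrative sketch only: "src_tmp", "dest_tmp" and the path are made-up names.
    // With the `if c.temporary` case in place, this statement should be planned as
    // CreateTempTableUsingAsSelect, so only a temporary table is registered and no
    // Hive metastore entry is created.
    sqlContext.range(10).registerTempTable("src_tmp")

    sqlContext.sql(
      """CREATE TEMPORARY TABLE dest_tmp
        |USING PARQUET
        |OPTIONS (PATH '/tmp/dest_tmp_output')
        |AS SELECT * FROM src_tmp
      """.stripMargin)

    // Without the TEMPORARY keyword, the same statement should still fall through to
    // CreateMetastoreDataSourceAsSelect and create a persisted table.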
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index b4886eba7a..7eaf19dfe9 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -19,12 +19,9 @@ package org.apache.spark.sql.hive.execution

import java.sql.{Date, Timestamp}

-import scala.collection.JavaConverters._
-
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, FunctionRegistry}
-import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.spark.sql.functions._
import org.apache.spark.sql.hive.{HiveContext, MetastoreRelation}
@@ -1852,4 +1849,50 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
}
}
}
+
+ test(
+ "SPARK-14488 \"CREATE TEMPORARY TABLE ... USING ... AS SELECT ...\" " +
+ "shouldn't create persisted table"
+ ) {
+ withTempPath { dir =>
+ withTempTable("t1", "t2") {
+ val path = dir.getCanonicalPath
+ val ds = sqlContext.range(10)
+ ds.registerTempTable("t1")
+
+ sql(
+ s"""CREATE TEMPORARY TABLE t2
+ |USING PARQUET
+ |OPTIONS (PATH '$path')
+ |AS SELECT * FROM t1
+ """.stripMargin)
+
+ checkAnswer(
+ sqlContext.tables().select('isTemporary).filter('tableName === "t2"),
+ Row(true)
+ )
+
+ checkAnswer(table("t2"), table("t1"))
+ }
+ }
+ }
+
+ test(
+ "SPARK-14493 \"CREATE TEMPORARY TABLE ... USING ... AS SELECT ...\" " +
+ "shouldn always be used together with PATH data source option"
+ ) {
+ withTempTable("t") {
+ sqlContext.range(10).registerTempTable("t")
+
+ val message = intercept[IllegalArgumentException] {
+ sql(
+ s"""CREATE TEMPORARY TABLE t1
+ |USING PARQUET
+ |AS SELECT * FROM t
+ """.stripMargin)
+ }.getMessage
+
+ assert(message == "'path' is not specified")
+ }
+ }
}
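
One property the first test leans on implicitly: the temporary CTAS still materializes its result as ordinary Parquet data under the supplied PATH; only the metastore registration is skipped. A hedged sketch of checking that output directly (assumes the same `path` value as in the test above and the standard DataFrameReader Parquet API):

    // Hedged sketch: read the CTAS output back from the path used above, bypassing
    // the temporary table entirely. `path` is assumed to be the directory passed via
    // OPTIONS (PATH '$path') in the first test.
    val reloaded = sqlContext.read.parquet(path)
    assert(reloaded.count() == 10L)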