author     Liang-Chi Hsieh <simonh@tw.ibm.com>   2016-04-14 11:08:08 -0700
committer  Andrew Or <andrew@databricks.com>     2016-04-14 11:08:08 -0700
commit     28efdd3fd789fa2ebed5be03b36ca0f682e37669 (patch)
tree       0c354cfde01c380c63d27b507a702c30616f6ce6 /sql/core/src
parent     c971aee40d806ed02d3d6a5cc478b63654052e54 (diff)
[SPARK-14592][SQL] Native support for CREATE TABLE LIKE DDL command
## What changes were proposed in this pull request?

JIRA: https://issues.apache.org/jira/browse/SPARK-14592

This patch adds native support for the DDL command `CREATE TABLE LIKE`. The SQL syntax is:

    CREATE TABLE table_name LIKE existing_table
    CREATE TABLE IF NOT EXISTS table_name LIKE existing_table

## How was this patch tested?

`HiveDDLCommandSuite`. `HiveQuerySuite` already tests `CREATE TABLE LIKE`.

Author: Liang-Chi Hsieh <simonh@tw.ibm.com>

This patch had conflicts when merged, resolved by
Committer: Andrew Or <andrew@databricks.com>

Closes #12362 from viirya/create-table-like.
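For illustration only (not part of the commit), a minimal sketch of issuing the new statement from Scala; the `sqlContext` handle and the table names `src` and `src_copy` are assumptions:

```scala
// Hypothetical usage of the new DDL command via the SQL API; assumes an
// existing table `src` registered in the session catalog.
sqlContext.sql("CREATE TABLE src_copy LIKE src")

// With IF NOT EXISTS, re-running the statement does not fail if the
// target table already exists.
sqlContext.sql("CREATE TABLE IF NOT EXISTS src_copy LIKE src")
```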
Diffstat (limited to 'sql/core/src')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala  40
1 file changed, 38 insertions(+), 2 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index e315598daa..0b41985174 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -17,9 +17,45 @@
package org.apache.spark.sql.execution.command
-import org.apache.spark.sql.{Row, SQLContext}
+import org.apache.spark.sql.{AnalysisException, Row, SQLContext}
import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.catalog.CatalogTable
+import org.apache.spark.sql.catalyst.catalog.{CatalogTable, CatalogTableType}
+
+/**
+ * A command to create a table with the same definition of the given existing table.
+ *
+ * The syntax of using this command in SQL is:
+ * {{{
+ * CREATE TABLE [IF NOT EXISTS] [db_name.]table_name
+ * LIKE [other_db_name.]existing_table_name
+ * }}}
+ */
+case class CreateTableLike(
+ targetTable: TableIdentifier,
+ sourceTable: TableIdentifier,
+ ifNotExists: Boolean) extends RunnableCommand {
+
+ override def run(sqlContext: SQLContext): Seq[Row] = {
+ val catalog = sqlContext.sessionState.catalog
+ if (!catalog.tableExists(sourceTable)) {
+ throw new AnalysisException(
+ s"Source table in CREATE TABLE LIKE does not exist: '$sourceTable'")
+ }
+ if (catalog.isTemporaryTable(sourceTable)) {
+ throw new AnalysisException(
+ s"Source table in CREATE TABLE LIKE cannot be temporary: '$sourceTable'")
+ }
+
+ val tableToCreate = catalog.getTableMetadata(sourceTable).copy(
+ identifier = targetTable,
+ tableType = CatalogTableType.MANAGED_TABLE,
+ createTime = System.currentTimeMillis,
+ lastAccessTime = -1).withNewStorage(locationUri = None)
+
+ catalog.createTable(tableToCreate, ifNotExists)
+ Seq.empty[Row]
+ }
+}
// TODO: move the rest of the table commands from ddl.scala to this file
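A hedged sketch of how the resulting metadata could be inspected, based on the `copy(...)` and `withNewStorage(...)` calls in the diff above; `sqlContext`, the source table `src`, and the target name `src_like` are assumptions for illustration, not part of the patch or its test suite:

```scala
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.CatalogTableType

// Create a copy of an existing table's definition.
sqlContext.sql("CREATE TABLE src_like LIKE src")

// Look up the new table's metadata in the session catalog.
val catalog = sqlContext.sessionState.catalog
val copied  = catalog.getTableMetadata(TableIdentifier("src_like"))

// Per the command's implementation, the copy is a managed table and does
// not reuse the source table's storage location.
assert(copied.tableType == CatalogTableType.MANAGED_TABLE)
assert(copied.storage.locationUri.isEmpty)
```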