diff options
author | Cheng Hao <hao.cheng@intel.com> | 2014-10-28 14:36:06 -0700 |
---|---|---|
committer | Michael Armbrust <michael@databricks.com> | 2014-10-28 14:36:06 -0700 |
commit | 4b55482abf899c27da3d55401ad26b4e9247b327 (patch) | |
tree | 53d9bff976ad43beff49a7b20e010f0de0cee793 /sql/hive/v0.12.0/src/main/scala | |
parent | abcafcfba38d7c8dba68a5510475c5c49ae54d92 (diff) | |
download | spark-4b55482abf899c27da3d55401ad26b4e9247b327.tar.gz spark-4b55482abf899c27da3d55401ad26b4e9247b327.tar.bz2 spark-4b55482abf899c27da3d55401ad26b4e9247b327.zip |
[SPARK-3343] [SQL] Add serde support for CTAS
Currently, `CTAS` (Create Table As Select) doesn't support specifying the `SerDe` in HQL. This PR will pass down the `ASTNode` into the physical operator `execution.CreateTableAsSelect`, which will extract the `CreateTableDesc` object via Hive `SemanticAnalyzer`. In the meantime, I also update the `HiveMetastoreCatalog.createTable` to optionally support the `CreateTableDesc` for table creation.
Author: Cheng Hao <hao.cheng@intel.com>
Closes #2570 from chenghao-intel/ctas_serde and squashes the following commits:
e011ef5 [Cheng Hao] shim for both 0.12 & 0.13.1
cfb3662 [Cheng Hao] revert to hive 0.12
c8a547d [Cheng Hao] Support SerDe properties within CTAS
Diffstat (limited to 'sql/hive/v0.12.0/src/main/scala')
-rw-r--r-- | sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala | 5 |
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala b/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala index 2317d2e763..8cb81db8a9 100644 --- a/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala +++ b/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala @@ -26,7 +26,7 @@ import org.apache.hadoop.hive.common.`type`.HiveDecimal import org.apache.hadoop.hive.conf.HiveConf import org.apache.hadoop.hive.ql.Context import org.apache.hadoop.hive.ql.metadata.{Hive, Partition, Table} -import org.apache.hadoop.hive.ql.plan.{FileSinkDesc, TableDesc} +import org.apache.hadoop.hive.ql.plan.{CreateTableDesc, FileSinkDesc, TableDesc} import org.apache.hadoop.hive.ql.processors._ import org.apache.hadoop.hive.ql.stats.StatsSetupConst import org.apache.hadoop.hive.serde2.{Deserializer, ColumnProjectionUtils} @@ -89,6 +89,9 @@ private[hive] object HiveShim { "udf_concat" ) + def setLocation(tbl: Table, crtTbl: CreateTableDesc): Unit = { + tbl.setDataLocation(new Path(crtTbl.getLocation()).toUri()) + } } class ShimFileSinkDesc(var dir: String, var tableInfo: TableDesc, var compressed: Boolean) |