From 3a3c9ffbd282244407e9437c2b02ae7e062dd183 Mon Sep 17 00:00:00 2001
From: gatorsmile
Date: Sun, 18 Sep 2016 15:37:15 +0800
Subject: [SPARK-17518][SQL] Block Users to Specify the Internal Data Source
 Provider Hive

### What changes were proposed in this pull request?
In Spark 2.1, we introduced a new internal provider `hive` to distinguish Hive
serde tables from data source tables. This PR blocks users from specifying this
provider in the `DataFrameWriter` and SQL APIs.

### How was this patch tested?
Added a test case.

Author: gatorsmile

Closes #15073 from gatorsmile/formatHive.
---
 sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala | 3 +++
 .../main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala | 5 ++++-
 .../src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala | 2 +-
 .../test/scala/org/apache/spark/sql/internal/CatalogSuite.scala    | 7 +++++++
 4 files changed, 15 insertions(+), 2 deletions(-)
(limited to 'sql/core/src')

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
index e137f076a0..64d3422cb4 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
@@ -357,6 +357,9 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
   }
 
   private def saveAsTable(tableIdent: TableIdentifier): Unit = {
+    if (source.toLowerCase == "hive") {
+      throw new AnalysisException("Cannot create hive serde table with saveAsTable API")
+    }
     val tableExists = df.sparkSession.sessionState.catalog.tableExists(tableIdent)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 7ba1a9ff22..5359cedc80 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -22,7 +22,7 @@ import scala.collection.JavaConverters._
 import org.antlr.v4.runtime.{ParserRuleContext, Token}
 import org.antlr.v4.runtime.tree.TerminalNode
 
-import org.apache.spark.sql.SaveMode
+import org.apache.spark.sql.{AnalysisException, SaveMode}
 import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.parser._
@@ -316,6 +316,9 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
     }
     val options = Option(ctx.tablePropertyList).map(visitPropertyKeyValues).getOrElse(Map.empty)
     val provider = ctx.tableProvider.qualifiedName.getText
+    if (provider.toLowerCase == "hive") {
+      throw new AnalysisException("Cannot create hive serde table with CREATE TABLE USING")
+    }
     val schema = Option(ctx.colTypeList()).map(createStructType)
 
     val partitionColumnNames = Option(ctx.partitionColumnNames)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
index 1f87f0e73a..78ad710a62 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
@@ -258,7 +258,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
       source: String,
       schema: StructType,
       options: Map[String, String]): DataFrame = {
-    if (source == "hive") {
+    if (source.toLowerCase == "hive") {
       throw new AnalysisException("Cannot create hive serde table with createExternalTable API.")
     }

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
index b221eed7b2..549fd63f74 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
@@ -322,6 +322,13 @@ class CatalogSuite
     assert(e2.message == "Cannot create a file-based external data source table without path")
   }
 
+  test("createExternalTable should fail if provider is hive") {
+    val e = intercept[AnalysisException] {
+      spark.catalog.createExternalTable("tbl", "HiVe", Map.empty[String, String])
+    }
+    assert(e.message.contains("Cannot create hive serde table with createExternalTable API"))
+  }
+
   // TODO: add tests for the rest of them
 }
--
cgit v1.2.3