 sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala              | 15 +++++
 sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala |  3 ++-
 sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala                       | 48 ++++++++++++++
 3 files changed, 65 insertions(+), 1 deletion(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index 28aa249888..cd242d78a4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -18,6 +18,7 @@
package org.apache.spark.sql.catalyst.analysis

import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.catalyst.catalog.SimpleCatalogRelation
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
import org.apache.spark.sql.catalyst.plans.UsingJoin
@@ -305,6 +306,20 @@ trait CheckAnalysis extends PredicateHelper {
                |Conflicting attributes: ${conflictingAttributes.mkString(",")}
              """.stripMargin)

+          case s: SimpleCatalogRelation =>
+            failAnalysis(
+              s"""
+                |Please enable Hive support when selecting regular tables:
+                |${s.catalogTable.identifier}
+              """.stripMargin)
+
+          case InsertIntoTable(s: SimpleCatalogRelation, _, _, _, _) =>
+            failAnalysis(
+              s"""
+                |Please enable Hive support when inserting into regular tables:
+                |${s.catalogTable.identifier}
+              """.stripMargin)
+
          case o if !o.resolved =>
            failAnalysis(
              s"unresolved operator ${operator.simpleString}")
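
With these two cases in place, a query plan that still contains a SimpleCatalogRelation (the placeholder relation the in-memory catalog returns for regular tables, which no physical strategy can execute) now fails analysis up front with an actionable message instead of a confusing planner error later. A minimal sketch of the resulting behavior, assuming a local SparkSession built without .enableHiveSupport(), so that spark.sql.catalogImplementation stays "in-memory":

    import org.apache.spark.sql.{AnalysisException, SparkSession}

    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("simple-catalog-relation-check")
      .getOrCreate()

    // Registering the table's metadata in the in-memory catalog still succeeds...
    spark.sql("CREATE TABLE tbl (i INT, j STRING)")

    // ...but scanning it now fails during analysis with the new message.
    try {
      spark.sql("SELECT * FROM tbl").collect()
    } catch {
      case e: AnalysisException =>
        println(e.getMessage) // "Please enable Hive support when selecting regular tables: ..."
    }
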
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
index b1b3e00de1..ca0096eeb2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
@@ -377,7 +377,8 @@ case class InsertIntoTable(
  }
  assert(overwrite || !ifNotExists)

-  override lazy val resolved: Boolean = childrenResolved && expectedColumns.forall { expected =>
+  override lazy val resolved: Boolean =
+    childrenResolved && table.resolved && expectedColumns.forall { expected =>
    child.output.size == expected.size && child.output.zip(expected).forall {
      case (childAttr, tableAttr) =>
        DataType.equalsIgnoreCompatibleNullability(childAttr.dataType, tableAttr.dataType)
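
Note the placement of the new conjunct: && evaluates left to right, so expectedColumns (a lazy val defined in this class that inspects table.output, which would throw for an unresolved relation) is never forced unless both childrenResolved and table.resolved hold. A toy, self-contained sketch of that short-circuit; tableResolved and expectedColumns below are hypothetical stand-ins, not the real plan members:

    object ShortCircuitSketch extends App {
      val tableResolved: Boolean = false
      lazy val expectedColumns: Seq[Int] = {
        // Never forced below, because `tableResolved && ...` short-circuits.
        sys.error("would be ill-defined against an unresolved table")
      }
      val resolved = tableResolved && expectedColumns.forall(_ > 0)
      println(s"resolved = $resolved") // prints: resolved = false
    }

The same guard also keeps an InsertIntoTable over an unresolved target from being reported as resolved, so CheckAnalysis can flag it instead of letting it slip through.
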
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index d72dc092e2..64f5a4ac47 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -22,6 +22,7 @@ import java.io.File
import org.apache.hadoop.fs.Path
import org.scalatest.BeforeAndAfterEach

+import org.apache.spark.internal.config._
import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.{DatabaseAlreadyExistsException, NoSuchPartitionException, NoSuchTableException}
@@ -1044,6 +1045,53 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
    )
  }

+  test("select/insert into a managed table") {
+    assume(spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "in-memory")
+    val tabName = "tbl"
+    withTable(tabName) {
+      sql(s"CREATE TABLE $tabName (i INT, j STRING)")
+      val catalogTable =
+        spark.sessionState.catalog.getTableMetadata(TableIdentifier(tabName, Some("default")))
+      assert(catalogTable.tableType == CatalogTableType.MANAGED)
+
+      var message = intercept[AnalysisException] {
+        sql(s"INSERT OVERWRITE TABLE $tabName SELECT 1, 'a'")
+      }.getMessage
+      assert(message.contains("Please enable Hive support when inserting into regular tables"))
+      message = intercept[AnalysisException] {
+        sql(s"SELECT * FROM $tabName")
+      }.getMessage
+      assert(message.contains("Please enable Hive support when selecting regular tables"))
+    }
+  }
+
+  test("select/insert into an external table") {
+    assume(spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "in-memory")
+    withTempDir { tempDir =>
+      val tabName = "tbl"
+      withTable(tabName) {
+        sql(
+          s"""
+             |CREATE EXTERNAL TABLE $tabName (i INT, j STRING)
+             |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
+             |LOCATION '$tempDir'
+           """.stripMargin)
+        val catalogTable =
+          spark.sessionState.catalog.getTableMetadata(TableIdentifier(tabName, Some("default")))
+        assert(catalogTable.tableType == CatalogTableType.EXTERNAL)
+
+        var message = intercept[AnalysisException] {
+          sql(s"INSERT OVERWRITE TABLE $tabName SELECT 1, 'a'")
+        }.getMessage
+        assert(message.contains("Please enable Hive support when inserting into regular tables"))
+        message = intercept[AnalysisException] {
+          sql(s"SELECT * FROM $tabName")
+        }.getMessage
+        assert(message.contains("Please enable Hive support when selecting regular tables"))
+      }
+    }
+  }
+
  test("drop default database") {
    Seq("true", "false").foreach { caseSensitive =>
      withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive) {
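
For contrast, a hedged sketch of the supported path: with a session built via .enableHiveSupport() (which requires a Spark build that bundles the Hive classes), the same DDL and DML that these tests expect to fail analyze and run normally; the assume(... == "in-memory") guard above simply skips the new tests in that configuration.

    import org.apache.spark.sql.SparkSession

    // Assumes a Spark build with Hive support classes on the classpath.
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("hive-enabled-sketch")
      .enableHiveSupport() // flips spark.sql.catalogImplementation to "hive"
      .getOrCreate()

    spark.sql("CREATE TABLE tbl (i INT, j STRING)")
    spark.sql("INSERT OVERWRITE TABLE tbl SELECT 1, 'a'")
    spark.sql("SELECT * FROM tbl").show() // one row: 1, a
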