author     gatorsmile <gatorsmile@gmail.com>    2017-01-22 20:37:37 -0800
committer  gatorsmile <gatorsmile@gmail.com>    2017-01-22 20:37:37 -0800
commit     772035e771a75593f031a8e78080bb58b8218e04 (patch)
tree       8f8c56f1b57c594624f2d372cd18a9df133c2731 /sql/core/src/test
parent     74e65cb74a8c023870a2ac9b1216c9d89c02f014 (diff)
[SPARK-19229][SQL] Disallow Creating Hive Source Tables when Hive Support is Not Enabled
### What changes were proposed in this pull request?
It is confusing to allow creating Hive source tables when using InMemoryCatalog, since we are unable to operate on them afterwards. This PR blocks users from creating Hive source tables when Hive support is not enabled.

### How was this patch tested?
Fixed the existing test cases.

Author: gatorsmile <gatorsmile@gmail.com>

Closes #16587 from gatorsmile/blockHiveTable.
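For illustration, here is a minimal sketch (not part of this commit) of the behavior the patch enforces on a Spark build of this era. The object and table names (BlockHiveTableDemo, hive_tbl, ds_tbl) are hypothetical. Without .enableHiveSupport(), the session uses the in-memory catalog, and a CREATE TABLE statement without a USING clause is parsed as a Hive source table, which is now rejected up front with an AnalysisException instead of failing later on SELECT/INSERT:

import org.apache.spark.sql.{AnalysisException, SparkSession}

object BlockHiveTableDemo {
  def main(args: Array[String]): Unit = {
    // No .enableHiveSupport(), so spark.sql.catalogImplementation stays "in-memory".
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("block-hive-table-demo")
      .getOrCreate()

    try {
      // No USING clause: parsed as a Hive source table, now blocked at creation time.
      spark.sql("CREATE TABLE hive_tbl (i INT, j STRING)")
    } catch {
      case e: AnalysisException =>
        // Expected message contains: "Hive support is required to CREATE Hive TABLE"
        println(e.getMessage)
    }

    // A data source table is still allowed with the in-memory catalog.
    spark.sql("CREATE TABLE ds_tbl (i INT, j STRING) USING parquet")

    spark.stop()
  }
}

This is why the SQL test inputs below gain an explicit USING parquet clause: they must create data source tables, not Hive source tables, to keep running without Hive support.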
Diffstat (limited to 'sql/core/src/test')
-rw-r--r--  sql/core/src/test/resources/sql-tests/inputs/change-column.sql                    4
-rw-r--r--  sql/core/src/test/resources/sql-tests/inputs/describe.sql                         2
-rw-r--r--  sql/core/src/test/resources/sql-tests/inputs/show-tables.sql                      4
-rw-r--r--  sql/core/src/test/resources/sql-tests/inputs/show_columns.sql                     4
-rw-r--r--  sql/core/src/test/resources/sql-tests/results/change-column.sql.out               4
-rw-r--r--  sql/core/src/test/resources/sql-tests/results/describe.sql.out                    2
-rw-r--r--  sql/core/src/test/resources/sql-tests/results/show-tables.sql.out                 4
-rw-r--r--  sql/core/src/test/resources/sql-tests/results/show_columns.sql.out                4
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala    79
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala          1
10 files changed, 50 insertions(+), 58 deletions(-)
diff --git a/sql/core/src/test/resources/sql-tests/inputs/change-column.sql b/sql/core/src/test/resources/sql-tests/inputs/change-column.sql
index 818b19c50f..ad0f885f63 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/change-column.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/change-column.sql
@@ -1,5 +1,5 @@
-- Create the origin table
-CREATE TABLE test_change(a INT, b STRING, c INT);
+CREATE TABLE test_change(a INT, b STRING, c INT) using parquet;
DESC test_change;
-- Change column name (not supported yet)
@@ -47,7 +47,7 @@ CREATE GLOBAL TEMPORARY VIEW global_temp_view(a, b) AS SELECT 1, "one";
ALTER TABLE global_temp.global_temp_view CHANGE a a INT COMMENT 'this is column a';
-- Change column in partition spec (not supported yet)
-CREATE TABLE partition_table(a INT, b STRING) PARTITIONED BY (c INT, d STRING);
+CREATE TABLE partition_table(a INT, b STRING, c INT, d STRING) USING parquet PARTITIONED BY (c, d);
ALTER TABLE partition_table PARTITION (c = 1) CHANGE COLUMN a new_a INT;
-- DROP TEST TABLE
diff --git a/sql/core/src/test/resources/sql-tests/inputs/describe.sql b/sql/core/src/test/resources/sql-tests/inputs/describe.sql
index 84503d0b12..ff327f5e82 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/describe.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/describe.sql
@@ -1,4 +1,4 @@
-CREATE TABLE t (a STRING, b INT) PARTITIONED BY (c STRING, d STRING);
+CREATE TABLE t (a STRING, b INT, c STRING, d STRING) USING parquet PARTITIONED BY (c, d);
ALTER TABLE t ADD PARTITION (c='Us', d=1);
diff --git a/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql b/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
index 18d02e150e..10c379dfa0 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
@@ -1,9 +1,9 @@
-- Test data.
CREATE DATABASE showdb;
USE showdb;
-CREATE TABLE show_t1(a String, b Int) PARTITIONED BY (c String, d String);
+CREATE TABLE show_t1(a String, b Int, c String, d String) USING parquet PARTITIONED BY (c, d);
ALTER TABLE show_t1 ADD PARTITION (c='Us', d=1);
-CREATE TABLE show_t2(b String, d Int);
+CREATE TABLE show_t2(b String, d Int) USING parquet;
CREATE TEMPORARY VIEW show_t3(e int) USING parquet;
CREATE GLOBAL TEMP VIEW show_t4 AS SELECT 1 as col1;
diff --git a/sql/core/src/test/resources/sql-tests/inputs/show_columns.sql b/sql/core/src/test/resources/sql-tests/inputs/show_columns.sql
index 3894082255..1e02c2f045 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/show_columns.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/show_columns.sql
@@ -2,8 +2,8 @@ CREATE DATABASE showdb;
USE showdb;
-CREATE TABLE showcolumn1 (col1 int, `col 2` int);
-CREATE TABLE showcolumn2 (price int, qty int) partitioned by (year int, month int);
+CREATE TABLE showcolumn1 (col1 int, `col 2` int) USING parquet;
+CREATE TABLE showcolumn2 (price int, qty int, year int, month int) USING parquet partitioned by (year, month);
CREATE TEMPORARY VIEW showColumn3 (col3 int, `col 4` int) USING parquet;
CREATE GLOBAL TEMP VIEW showColumn4 AS SELECT 1 as col1, 'abc' as `col 5`;
diff --git a/sql/core/src/test/resources/sql-tests/results/change-column.sql.out b/sql/core/src/test/resources/sql-tests/results/change-column.sql.out
index 156ddb86ad..59eb56920c 100644
--- a/sql/core/src/test/resources/sql-tests/results/change-column.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/change-column.sql.out
@@ -3,7 +3,7 @@
-- !query 0
-CREATE TABLE test_change(a INT, b STRING, c INT)
+CREATE TABLE test_change(a INT, b STRING, c INT) using parquet
-- !query 0 schema
struct<>
-- !query 0 output
@@ -269,7 +269,7 @@ Database 'global_temp' not found;
-- !query 28
-CREATE TABLE partition_table(a INT, b STRING) PARTITIONED BY (c INT, d STRING)
+CREATE TABLE partition_table(a INT, b STRING, c INT, d STRING) USING parquet PARTITIONED BY (c, d)
-- !query 28 schema
struct<>
-- !query 28 output
diff --git a/sql/core/src/test/resources/sql-tests/results/describe.sql.out b/sql/core/src/test/resources/sql-tests/results/describe.sql.out
index b448d60c76..0a11c1cde2 100644
--- a/sql/core/src/test/resources/sql-tests/results/describe.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/describe.sql.out
@@ -3,7 +3,7 @@
-- !query 0
-CREATE TABLE t (a STRING, b INT) PARTITIONED BY (c STRING, d STRING)
+CREATE TABLE t (a STRING, b INT, c STRING, d STRING) USING parquet PARTITIONED BY (c, d)
-- !query 0 schema
struct<>
-- !query 0 output
diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
index 904601bf11..3d287f43ac 100644
--- a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
@@ -19,7 +19,7 @@ struct<>
-- !query 2
-CREATE TABLE show_t1(a String, b Int) PARTITIONED BY (c String, d String)
+CREATE TABLE show_t1(a String, b Int, c String, d String) USING parquet PARTITIONED BY (c, d)
-- !query 2 schema
struct<>
-- !query 2 output
@@ -35,7 +35,7 @@ struct<>
-- !query 4
-CREATE TABLE show_t2(b String, d Int)
+CREATE TABLE show_t2(b String, d Int) USING parquet
-- !query 4 schema
struct<>
-- !query 4 output
diff --git a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out
index 832e6e25bb..05c3a083ee 100644
--- a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out
@@ -19,7 +19,7 @@ struct<>
-- !query 2
-CREATE TABLE showcolumn1 (col1 int, `col 2` int)
+CREATE TABLE showcolumn1 (col1 int, `col 2` int) USING parquet
-- !query 2 schema
struct<>
-- !query 2 output
@@ -27,7 +27,7 @@ struct<>
-- !query 3
-CREATE TABLE showcolumn2 (price int, qty int) partitioned by (year int, month int)
+CREATE TABLE showcolumn2 (price int, qty int, year int, month int) USING parquet partitioned by (year, month)
-- !query 3 schema
struct<>
-- !query 3 output
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index b4c9e276ec..51f5946c19 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -97,7 +97,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
.add("col2", "string")
.add("a", "int")
.add("b", "int"),
- provider = Some("hive"),
+ provider = Some("parquet"),
partitionColumnNames = Seq("a", "b"),
createTime = 0L,
tracksPartitionsInCatalog = true)
@@ -759,7 +759,8 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
testUnsetProperties(isDatasourceTable = true)
}
- test("alter table: set serde") {
+ // TODO: move this test to HiveDDLSuite.scala
+ ignore("alter table: set serde") {
testSetSerde(isDatasourceTable = false)
}
@@ -767,7 +768,8 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
testSetSerde(isDatasourceTable = true)
}
- test("alter table: set serde partition") {
+ // TODO: move this test to HiveDDLSuite.scala
+ ignore("alter table: set serde partition") {
testSetSerdePartition(isDatasourceTable = false)
}
@@ -1480,49 +1482,31 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
)
}
- test("select/insert into the managed table") {
+ test("create a managed Hive source table") {
assume(spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "in-memory")
val tabName = "tbl"
withTable(tabName) {
- sql(s"CREATE TABLE $tabName (i INT, j STRING)")
- val catalogTable =
- spark.sessionState.catalog.getTableMetadata(TableIdentifier(tabName, Some("default")))
- assert(catalogTable.tableType == CatalogTableType.MANAGED)
-
- var message = intercept[AnalysisException] {
- sql(s"INSERT OVERWRITE TABLE $tabName SELECT 1, 'a'")
- }.getMessage
- assert(message.contains("Hive support is required to insert into the following tables"))
- message = intercept[AnalysisException] {
- sql(s"SELECT * FROM $tabName")
+ val e = intercept[AnalysisException] {
+ sql(s"CREATE TABLE $tabName (i INT, j STRING)")
}.getMessage
- assert(message.contains("Hive support is required to select over the following tables"))
+ assert(e.contains("Hive support is required to CREATE Hive TABLE"))
}
}
- test("select/insert into external table") {
+ test("create an external Hive source table") {
assume(spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "in-memory")
withTempDir { tempDir =>
val tabName = "tbl"
withTable(tabName) {
- sql(
- s"""
- |CREATE EXTERNAL TABLE $tabName (i INT, j STRING)
- |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
- |LOCATION '$tempDir'
+ val e = intercept[AnalysisException] {
+ sql(
+ s"""
+ |CREATE EXTERNAL TABLE $tabName (i INT, j STRING)
+ |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
+ |LOCATION '${tempDir.toURI}'
""".stripMargin)
- val catalogTable =
- spark.sessionState.catalog.getTableMetadata(TableIdentifier(tabName, Some("default")))
- assert(catalogTable.tableType == CatalogTableType.EXTERNAL)
-
- var message = intercept[AnalysisException] {
- sql(s"INSERT OVERWRITE TABLE $tabName SELECT 1, 'a'")
- }.getMessage
- assert(message.contains("Hive support is required to insert into the following tables"))
- message = intercept[AnalysisException] {
- sql(s"SELECT * FROM $tabName")
}.getMessage
- assert(message.contains("Hive support is required to select over the following tables"))
+ assert(e.contains("Hive support is required to CREATE Hive TABLE"))
}
}
}
@@ -1583,7 +1567,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
sql("USE temp")
sql("DROP DATABASE temp")
val e = intercept[AnalysisException] {
- sql("CREATE TABLE t (a INT, b INT)")
+ sql("CREATE TABLE t (a INT, b INT) USING parquet")
}.getMessage
assert(e.contains("Database 'temp' not found"))
}
@@ -1693,20 +1677,27 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
test("truncate table - external table, temporary table, view (not allowed)") {
import testImplicits._
- val path = Utils.createTempDir().getAbsolutePath
- (1 to 10).map { i => (i, i) }.toDF("a", "b").createTempView("my_temp_tab")
- sql(s"CREATE EXTERNAL TABLE my_ext_tab LOCATION '$path'")
- sql(s"CREATE VIEW my_view AS SELECT 1")
- intercept[NoSuchTableException] {
- sql("TRUNCATE TABLE my_temp_tab")
+ withTempPath { tempDir =>
+ withTable("my_ext_tab") {
+ (("a", "b") :: Nil).toDF().write.parquet(tempDir.getCanonicalPath)
+ (1 to 10).map { i => (i, i) }.toDF("a", "b").createTempView("my_temp_tab")
+ sql(s"CREATE TABLE my_ext_tab using parquet LOCATION '${tempDir.toURI}'")
+ sql(s"CREATE VIEW my_view AS SELECT 1")
+ intercept[NoSuchTableException] {
+ sql("TRUNCATE TABLE my_temp_tab")
+ }
+ assertUnsupported("TRUNCATE TABLE my_ext_tab")
+ assertUnsupported("TRUNCATE TABLE my_view")
+ }
}
- assertUnsupported("TRUNCATE TABLE my_ext_tab")
- assertUnsupported("TRUNCATE TABLE my_view")
}
test("truncate table - non-partitioned table (not allowed)") {
- sql("CREATE TABLE my_tab (age INT, name STRING)")
- assertUnsupported("TRUNCATE TABLE my_tab PARTITION (age=10)")
+ withTable("my_tab") {
+ sql("CREATE TABLE my_tab (age INT, name STRING) using parquet")
+ sql("INSERT INTO my_tab values (10, 'a')")
+ assertUnsupported("TRUNCATE TABLE my_tab PARTITION (age=10)")
+ }
}
test("SPARK-16034 Partition columns should match when appending to existing data source tables") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
index 801912f441..75723d0abc 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
@@ -47,6 +47,7 @@ class CatalogSuite
private val utils = new CatalogTestUtils {
override val tableInputFormat: String = "com.fruit.eyephone.CameraInputFormat"
override val tableOutputFormat: String = "com.fruit.eyephone.CameraOutputFormat"
+ override val defaultProvider: String = "parquet"
override def newEmptyCatalog(): ExternalCatalog = spark.sharedState.externalCatalog
}