Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala  87
1 file changed, 86 insertions(+), 1 deletion(-)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index dc4d099f0f..6c77a0deb5 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -26,7 +26,7 @@ import org.apache.hadoop.fs.Path
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, FunctionRegistry}
+import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, FunctionRegistry, NoSuchPartitionException}
import org.apache.spark.sql.catalyst.catalog.CatalogTableType
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.execution.datasources.{HadoopFsRelation, LogicalRelation}
@@ -341,6 +341,91 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
    }
  }
+  test("describe partition") {
+    withTable("partitioned_table") {
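+      // Set up a Hive table partitioned by (c, d) with a single partition (c='Us', d=1).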
+ sql("CREATE TABLE partitioned_table (a STRING, b INT) PARTITIONED BY (c STRING, d STRING)")
+ sql("ALTER TABLE partitioned_table ADD PARTITION (c='Us', d=1)")
+
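+      // Plain DESC should append a partition-information section to the column listing.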
+ checkKeywordsExist(sql("DESC partitioned_table PARTITION (c='Us', d=1)"),
+ "# Partition Information",
+ "# col_name")
+
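+      // DESC EXTENDED should also print the detailed CatalogPartition entry.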
+ checkKeywordsExist(sql("DESC EXTENDED partitioned_table PARTITION (c='Us', d=1)"),
+ "# Partition Information",
+ "# col_name",
+ "Detailed Partition Information CatalogPartition(",
+ "Partition Values: [Us, 1]",
+ "Storage(Location:",
+ "Partition Parameters")
+
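+      // DESC FORMATTED should print the same details as labeled sections.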
+ checkKeywordsExist(sql("DESC FORMATTED partitioned_table PARTITION (c='Us', d=1)"),
+ "# Partition Information",
+ "# col_name",
+ "# Detailed Partition Information",
+ "Partition Value:",
+ "Database:",
+ "Table:",
+ "Location:",
+ "Partition Parameters:",
+ "# Storage Information")
+ }
+ }
+
+ test("describe partition - error handling") {
+ withTable("partitioned_table", "datasource_table") {
+ sql("CREATE TABLE partitioned_table (a STRING, b INT) PARTITIONED BY (c STRING, d STRING)")
+ sql("ALTER TABLE partitioned_table ADD PARTITION (c='Us', d=1)")
+
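+      // Describing a nonexistent partition fails with NoSuchPartitionException.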
+      val m = intercept[NoSuchPartitionException] {
+        sql("DESC partitioned_table PARTITION (c='Us', d=2)")
+      }.getMessage()
+      assert(m.contains("Partition not found in table"))
+
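+      // A partial partition spec (column d is missing) is rejected during analysis.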
+      val m2 = intercept[AnalysisException] {
+        sql("DESC partitioned_table PARTITION (c='Us')")
+      }.getMessage()
+      assert(m2.contains("Partition spec is invalid"))
+
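+      // A partition column without a value is rejected by the parser.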
+      val m3 = intercept[ParseException] {
+        sql("DESC partitioned_table PARTITION (c='Us', d)")
+      }.getMessage()
+      assert(m3.contains("PARTITION specification is incomplete: `d`"))
+
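+      // DESC PARTITION is only supported for Hive tables, not for datasource tables.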
+      spark
+        .range(1).select('id as 'a, 'id as 'b, 'id as 'c, 'id as 'd).write
+        .partitionBy("d")
+        .saveAsTable("datasource_table")
+      val m4 = intercept[AnalysisException] {
+        sql("DESC datasource_table PARTITION (d=2)")
+      }.getMessage()
+      assert(m4.contains("DESC PARTITION is not allowed on a datasource table"))
+
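+      // DESC PARTITION is likewise rejected for temporary views.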
+      val m5 = intercept[AnalysisException] {
+        spark.range(10).select('id as 'a, 'id as 'b).createTempView("view1")
+        sql("DESC view1 PARTITION (c='Us', d=1)")
+      }.getMessage()
+      assert(m5.contains("DESC PARTITION is not allowed on a temporary view"))
+
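+      // DESC PARTITION is likewise rejected for permanent views.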
+      withView("permanent_view") {
+        val m = intercept[AnalysisException] {
+          sql("CREATE VIEW permanent_view AS SELECT * FROM partitioned_table")
+          sql("DESC permanent_view PARTITION (c='Us', d=1)")
+        }.getMessage()
+        assert(m.contains("DESC PARTITION is not allowed on a view"))
+      }
+    }
+  }
+
test("SPARK-5371: union with null and sum") {
val df = Seq((1, 1)).toDF("c1", "c2")
df.createOrReplaceTempView("table1")