path: root/sql/hive/src
author    Reynold Xin <rxin@databricks.com>    2016-07-06 17:40:55 +0800
committer Cheng Lian <lian@databricks.com>    2016-07-06 17:40:55 +0800
commit    7e28fabdff2da1cc374efbf43372d92ae0cd07aa (patch)
tree      70f7a4ce8f237e494f3ea838b80f772745b3fa12 /sql/hive/src
parent    5497242c769b40338bfa57d64f2c64996dfa57e8 (diff)
[SPARK-16388][SQL] Remove spark.sql.nativeView and spark.sql.nativeView.canonical config
## What changes were proposed in this pull request?

These two configs should always be true after Spark 2.0. This patch removes them from the config list. Note that ideally this should've gone into branch-2.0, but due to the timing of the release we should only merge this in master for Spark 2.1.

## How was this patch tested?

Updated test cases.

Author: Reynold Xin <rxin@databricks.com>

Closes #14061 from rxin/SPARK-16388.
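For context, a minimal sketch of the test-side change this patch makes, mirroring the diff below (the view name and query are illustrative, and the snippet assumes the suite helpers `sql`, `withSQLConf`, `withView`, and `checkAnswer` provided by `QueryTest`/`SQLTestUtils`):

```scala
// Before this patch: tests had to opt in to the native view code path explicitly.
withSQLConf(SQLConf.NATIVE_VIEW.key -> "true", SQLConf.CANONICAL_NATIVE_VIEW.key -> "true") {
  withView("v") {
    sql("CREATE VIEW v AS SELECT * FROM src")
    checkAnswer(sql("SELECT * FROM v"), sql("SELECT * FROM src"))
  }
}

// After this patch: native, canonical views are the only behavior,
// so the withSQLConf wrapper (and both config keys) disappears.
withView("v") {
  sql("CREATE VIEW v AS SELECT * FROM src")
  checkAnswer(sql("SELECT * FROM v"), sql("SELECT * FROM src"))
}
```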
Diffstat (limited to 'sql/hive/src')
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala  206
1 file changed, 88 insertions(+), 118 deletions(-)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala
index 39846f145c..82dc64a457 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala
@@ -95,15 +95,13 @@ class SQLViewSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
}
test("correctly parse CREATE VIEW statement") {
- withSQLConf(SQLConf.NATIVE_VIEW.key -> "true") {
- sql(
- """CREATE VIEW IF NOT EXISTS
- |default.testView (c1 COMMENT 'blabla', c2 COMMENT 'blabla')
- |TBLPROPERTIES ('a' = 'b')
- |AS SELECT * FROM jt""".stripMargin)
- checkAnswer(sql("SELECT c1, c2 FROM testView ORDER BY c1"), (1 to 9).map(i => Row(i, i)))
- sql("DROP VIEW testView")
- }
+ sql(
+ """CREATE VIEW IF NOT EXISTS
+ |default.testView (c1 COMMENT 'blabla', c2 COMMENT 'blabla')
+ |TBLPROPERTIES ('a' = 'b')
+ |AS SELECT * FROM jt""".stripMargin)
+ checkAnswer(sql("SELECT c1, c2 FROM testView ORDER BY c1"), (1 to 9).map(i => Row(i, i)))
+ sql("DROP VIEW testView")
}
test("correctly parse CREATE TEMPORARY VIEW statement") {
@@ -145,18 +143,16 @@ class SQLViewSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
}
test("correctly handle CREATE VIEW IF NOT EXISTS") {
- withSQLConf(SQLConf.NATIVE_VIEW.key -> "true") {
- withTable("jt2") {
- withView("testView") {
- sql("CREATE VIEW testView AS SELECT id FROM jt")
+ withTable("jt2") {
+ withView("testView") {
+ sql("CREATE VIEW testView AS SELECT id FROM jt")
- val df = (1 until 10).map(i => i -> i).toDF("i", "j")
- df.write.format("json").saveAsTable("jt2")
- sql("CREATE VIEW IF NOT EXISTS testView AS SELECT * FROM jt2")
+ val df = (1 until 10).map(i => i -> i).toDF("i", "j")
+ df.write.format("json").saveAsTable("jt2")
+ sql("CREATE VIEW IF NOT EXISTS testView AS SELECT * FROM jt2")
- // make sure our view doesn't change.
- checkAnswer(sql("SELECT * FROM testView ORDER BY id"), (1 to 9).map(i => Row(i)))
- }
+ // make sure our view doesn't change.
+ checkAnswer(sql("SELECT * FROM testView ORDER BY id"), (1 to 9).map(i => Row(i)))
}
}
}
@@ -174,134 +170,108 @@ class SQLViewSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
}
}
- Seq(true, false).foreach { enabled =>
- val prefix = (if (enabled) "With" else "Without") + " canonical native view: "
- test(s"$prefix correctly handle CREATE OR REPLACE VIEW") {
- withSQLConf(
- SQLConf.NATIVE_VIEW.key -> "true", SQLConf.CANONICAL_NATIVE_VIEW.key -> enabled.toString) {
- withTable("jt2") {
- sql("CREATE OR REPLACE VIEW testView AS SELECT id FROM jt")
- checkAnswer(sql("SELECT * FROM testView ORDER BY id"), (1 to 9).map(i => Row(i)))
-
- val df = (1 until 10).map(i => i -> i).toDF("i", "j")
- df.write.format("json").saveAsTable("jt2")
- sql("CREATE OR REPLACE VIEW testView AS SELECT * FROM jt2")
- // make sure the view has been changed.
- checkAnswer(sql("SELECT * FROM testView ORDER BY i"), (1 to 9).map(i => Row(i, i)))
-
- sql("DROP VIEW testView")
-
- val e = intercept[AnalysisException] {
- sql("CREATE OR REPLACE VIEW IF NOT EXISTS testView AS SELECT id FROM jt")
- }
- assert(e.message.contains(
- "CREATE VIEW with both IF NOT EXISTS and REPLACE is not allowed"))
- }
+ test("correctly handle CREATE OR REPLACE VIEW") {
+ withTable("jt2") {
+ sql("CREATE OR REPLACE VIEW testView AS SELECT id FROM jt")
+ checkAnswer(sql("SELECT * FROM testView ORDER BY id"), (1 to 9).map(i => Row(i)))
+
+ val df = (1 until 10).map(i => i -> i).toDF("i", "j")
+ df.write.format("json").saveAsTable("jt2")
+ sql("CREATE OR REPLACE VIEW testView AS SELECT * FROM jt2")
+ // make sure the view has been changed.
+ checkAnswer(sql("SELECT * FROM testView ORDER BY i"), (1 to 9).map(i => Row(i, i)))
+
+ sql("DROP VIEW testView")
+
+ val e = intercept[AnalysisException] {
+ sql("CREATE OR REPLACE VIEW IF NOT EXISTS testView AS SELECT id FROM jt")
}
+ assert(e.message.contains(
+ "CREATE VIEW with both IF NOT EXISTS and REPLACE is not allowed"))
}
+ }
- test(s"$prefix correctly handle ALTER VIEW") {
- withSQLConf(
- SQLConf.NATIVE_VIEW.key -> "true", SQLConf.CANONICAL_NATIVE_VIEW.key -> enabled.toString) {
- withTable("jt2") {
- withView("testView") {
- sql("CREATE VIEW testView AS SELECT id FROM jt")
-
- val df = (1 until 10).map(i => i -> i).toDF("i", "j")
- df.write.format("json").saveAsTable("jt2")
- sql("ALTER VIEW testView AS SELECT * FROM jt2")
- // make sure the view has been changed.
- checkAnswer(sql("SELECT * FROM testView ORDER BY i"), (1 to 9).map(i => Row(i, i)))
- }
- }
+ test("correctly handle ALTER VIEW") {
+ withTable("jt2") {
+ withView("testView") {
+ sql("CREATE VIEW testView AS SELECT id FROM jt")
+
+ val df = (1 until 10).map(i => i -> i).toDF("i", "j")
+ df.write.format("json").saveAsTable("jt2")
+ sql("ALTER VIEW testView AS SELECT * FROM jt2")
+ // make sure the view has been changed.
+ checkAnswer(sql("SELECT * FROM testView ORDER BY i"), (1 to 9).map(i => Row(i, i)))
}
}
+ }
- test(s"$prefix create hive view for json table") {
- // json table is not hive-compatible, make sure the new flag fix it.
- withSQLConf(
- SQLConf.NATIVE_VIEW.key -> "true", SQLConf.CANONICAL_NATIVE_VIEW.key -> enabled.toString) {
- withView("testView") {
- sql("CREATE VIEW testView AS SELECT id FROM jt")
- checkAnswer(sql("SELECT * FROM testView ORDER BY id"), (1 to 9).map(i => Row(i)))
- }
- }
+ test("create hive view for json table") {
+ // json table is not hive-compatible, make sure the new flag fix it.
+ withView("testView") {
+ sql("CREATE VIEW testView AS SELECT id FROM jt")
+ checkAnswer(sql("SELECT * FROM testView ORDER BY id"), (1 to 9).map(i => Row(i)))
}
+ }
- test(s"$prefix create hive view for partitioned parquet table") {
- // partitioned parquet table is not hive-compatible, make sure the new flag fix it.
- withSQLConf(
- SQLConf.NATIVE_VIEW.key -> "true", SQLConf.CANONICAL_NATIVE_VIEW.key -> enabled.toString) {
- withTable("parTable") {
- withView("testView") {
- val df = Seq(1 -> "a").toDF("i", "j")
- df.write.format("parquet").partitionBy("i").saveAsTable("parTable")
- sql("CREATE VIEW testView AS SELECT i, j FROM parTable")
- checkAnswer(sql("SELECT * FROM testView"), Row(1, "a"))
- }
- }
+ test("create hive view for partitioned parquet table") {
+ // partitioned parquet table is not hive-compatible, make sure the new flag fix it.
+ withTable("parTable") {
+ withView("testView") {
+ val df = Seq(1 -> "a").toDF("i", "j")
+ df.write.format("parquet").partitionBy("i").saveAsTable("parTable")
+ sql("CREATE VIEW testView AS SELECT i, j FROM parTable")
+ checkAnswer(sql("SELECT * FROM testView"), Row(1, "a"))
}
}
}
test("CTE within view") {
- withSQLConf(
- SQLConf.NATIVE_VIEW.key -> "true", SQLConf.CANONICAL_NATIVE_VIEW.key -> "true") {
- withView("cte_view") {
- sql("CREATE VIEW cte_view AS WITH w AS (SELECT 1 AS n) SELECT n FROM w")
- checkAnswer(sql("SELECT * FROM cte_view"), Row(1))
- }
+ withView("cte_view") {
+ sql("CREATE VIEW cte_view AS WITH w AS (SELECT 1 AS n) SELECT n FROM w")
+ checkAnswer(sql("SELECT * FROM cte_view"), Row(1))
}
}
test("Using view after switching current database") {
- withSQLConf(
- SQLConf.NATIVE_VIEW.key -> "true", SQLConf.CANONICAL_NATIVE_VIEW.key -> "true") {
- withView("v") {
- sql("CREATE VIEW v AS SELECT * FROM src")
- withTempDatabase { db =>
- activateDatabase(db) {
- // Should look up table `src` in database `default`.
- checkAnswer(sql("SELECT * FROM default.v"), sql("SELECT * FROM default.src"))
-
- // The new `src` table shouldn't be scanned.
- sql("CREATE TABLE src(key INT, value STRING)")
- checkAnswer(sql("SELECT * FROM default.v"), sql("SELECT * FROM default.src"))
- }
+ withView("v") {
+ sql("CREATE VIEW v AS SELECT * FROM src")
+ withTempDatabase { db =>
+ activateDatabase(db) {
+ // Should look up table `src` in database `default`.
+ checkAnswer(sql("SELECT * FROM default.v"), sql("SELECT * FROM default.src"))
+
+ // The new `src` table shouldn't be scanned.
+ sql("CREATE TABLE src(key INT, value STRING)")
+ checkAnswer(sql("SELECT * FROM default.v"), sql("SELECT * FROM default.src"))
}
}
}
}
test("Using view after adding more columns") {
- withSQLConf(
- SQLConf.NATIVE_VIEW.key -> "true", SQLConf.CANONICAL_NATIVE_VIEW.key -> "true") {
- withTable("add_col") {
- spark.range(10).write.saveAsTable("add_col")
- withView("v") {
- sql("CREATE VIEW v AS SELECT * FROM add_col")
- spark.range(10).select('id, 'id as 'a).write.mode("overwrite").saveAsTable("add_col")
- checkAnswer(sql("SELECT * FROM v"), spark.range(10).toDF())
- }
+ withTable("add_col") {
+ spark.range(10).write.saveAsTable("add_col")
+ withView("v") {
+ sql("CREATE VIEW v AS SELECT * FROM add_col")
+ spark.range(10).select('id, 'id as 'a).write.mode("overwrite").saveAsTable("add_col")
+ checkAnswer(sql("SELECT * FROM v"), spark.range(10).toDF())
}
}
}
test("create hive view for joined tables") {
// make sure the new flag can handle some complex cases like join and schema change.
- withSQLConf(SQLConf.NATIVE_VIEW.key -> "true") {
- withTable("jt1", "jt2") {
- spark.range(1, 10).toDF("id1").write.format("json").saveAsTable("jt1")
- spark.range(1, 10).toDF("id2").write.format("json").saveAsTable("jt2")
- sql("CREATE VIEW testView AS SELECT * FROM jt1 JOIN jt2 ON id1 == id2")
- checkAnswer(sql("SELECT * FROM testView ORDER BY id1"), (1 to 9).map(i => Row(i, i)))
-
- val df = (1 until 10).map(i => i -> i).toDF("id1", "newCol")
- df.write.format("json").mode(SaveMode.Overwrite).saveAsTable("jt1")
- checkAnswer(sql("SELECT * FROM testView ORDER BY id1"), (1 to 9).map(i => Row(i, i)))
-
- sql("DROP VIEW testView")
- }
+ withTable("jt1", "jt2") {
+ spark.range(1, 10).toDF("id1").write.format("json").saveAsTable("jt1")
+ spark.range(1, 10).toDF("id2").write.format("json").saveAsTable("jt2")
+ sql("CREATE VIEW testView AS SELECT * FROM jt1 JOIN jt2 ON id1 == id2")
+ checkAnswer(sql("SELECT * FROM testView ORDER BY id1"), (1 to 9).map(i => Row(i, i)))
+
+ val df = (1 until 10).map(i => i -> i).toDF("id1", "newCol")
+ df.write.format("json").mode(SaveMode.Overwrite).saveAsTable("jt1")
+ checkAnswer(sql("SELECT * FROM testView ORDER BY id1"), (1 to 9).map(i => Row(i, i)))
+
+ sql("DROP VIEW testView")
}
}