From 86c275206605c44e1ebca2f166d62868e44bf029 Mon Sep 17 00:00:00 2001
From: Wenchen Fan
Date: Sat, 23 Jul 2016 11:39:48 -0700
Subject: [SPARK-16690][TEST] rename SQLTestUtils.withTempTable to withTempView

## What changes were proposed in this pull request?

After https://github.com/apache/spark/pull/12945 renamed `registerTempTable` to `createTempView` (the method actually creates a view, not a table), this PR renames the test helper `SQLTestUtils.withTempTable` to `withTempView` to match.

## How was this patch tested?

N/A

Author: Wenchen Fan

Closes #14318 from cloud-fan/minor4.
---
 .../scala/org/apache/spark/sql/hive/CachedTableSuite.scala   | 10 +++++-----
 .../org/apache/spark/sql/hive/HiveMetadataCacheSuite.scala   |  2 +-
 .../scala/org/apache/spark/sql/hive/HiveParquetSuite.scala   |  4 ++--
 .../apache/spark/sql/hive/MetastoreDataSourcesSuite.scala    |  8 ++++----
 .../spark/sql/hive/ParquetHiveCompatibilitySuite.scala       |  2 +-
 .../spark/sql/hive/execution/AggregationQuerySuite.scala     |  2 +-
 .../apache/spark/sql/hive/execution/HiveCommandSuite.scala   |  4 ++--
 .../apache/spark/sql/hive/execution/HiveExplainSuite.scala   |  2 +-
 .../spark/sql/hive/execution/HiveTableScanSuite.scala        |  2 +-
 .../org/apache/spark/sql/hive/execution/HiveUDFSuite.scala   |  2 +-
 .../org/apache/spark/sql/hive/execution/SQLQuerySuite.scala  |  8 ++++----
 .../scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala  |  6 +++---
 .../test/scala/org/apache/spark/sql/hive/orc/OrcTest.scala   |  2 +-
 .../test/scala/org/apache/spark/sql/hive/parquetSuites.scala |  2 +-
 .../org/apache/spark/sql/sources/HadoopFsRelationTest.scala  | 12 ++++++------
 15 files changed, 34 insertions(+), 34 deletions(-)
(limited to 'sql/hive/src')

diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
index f7c3e347b6..7d4ef6f26a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
@@ -129,7 +129,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
   }
 
   test("CACHE TABLE tableName AS SELECT * FROM anotherTable") {
-    withTempTable("testCacheTable") {
+    withTempView("testCacheTable") {
       sql("CACHE TABLE testCacheTable AS SELECT * FROM src")
       assertCached(table("testCacheTable"))
 
@@ -144,7 +144,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
   }
 
   test("CACHE TABLE tableName AS SELECT ...") {
-    withTempTable("testCacheTable") {
+    withTempView("testCacheTable") {
       sql("CACHE TABLE testCacheTable AS SELECT key FROM src LIMIT 10")
       assertCached(table("testCacheTable"))
 
@@ -177,7 +177,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
   }
 
   test("CACHE TABLE with Hive UDF") {
-    withTempTable("udfTest") {
+    withTempView("udfTest") {
       sql("CACHE TABLE udfTest AS SELECT * FROM src WHERE floor(key) = 1")
       assertCached(table("udfTest"))
       uncacheTable("udfTest")
@@ -276,7 +276,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
 
   test("Cache/Uncache Qualified Tables") {
     withTempDatabase { db =>
-      withTempTable("cachedTable") {
+      withTempView("cachedTable") {
         sql(s"CREATE TABLE $db.cachedTable STORED AS PARQUET AS SELECT 1")
         sql(s"CACHE TABLE $db.cachedTable")
         assertCached(spark.table(s"$db.cachedTable"))
@@ -298,7 +298,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
 
   test("Cache Table As Select - having database name") {
     withTempDatabase { db =>
-      withTempTable("cachedTable") {
+      withTempView("cachedTable") {
         val e = intercept[ParseException] {
           sql(s"CACHE TABLE $db.cachedTable AS SELECT 1")
         }.getMessage
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetadataCacheSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetadataCacheSuite.scala
index 5714d06f0f..3414f5e040 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetadataCacheSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetadataCacheSuite.scala
@@ -30,7 +30,7 @@ import org.apache.spark.sql.test.SQLTestUtils
 class HiveMetadataCacheSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
 
   test("SPARK-16337 temporary view refresh") {
-    withTempTable("view_refresh") {
+    withTempView("view_refresh") {
       withTable("view_table") {
         // Create a Parquet directory
         spark.range(start = 0, end = 100, step = 1, numPartitions = 3)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala
index 33252ad07a..09c15473b2 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala
@@ -52,7 +52,7 @@ class HiveParquetSuite extends QueryTest with ParquetTest with TestHiveSingleton
     withTempPath { dir =>
       sql("SELECT * FROM src").write.parquet(dir.getCanonicalPath)
       spark.read.parquet(dir.getCanonicalPath).createOrReplaceTempView("p")
-      withTempTable("p") {
+      withTempView("p") {
         checkAnswer(
           sql("SELECT * FROM src ORDER BY key"),
           sql("SELECT * from p ORDER BY key").collect().toSeq)
@@ -66,7 +66,7 @@ class HiveParquetSuite extends QueryTest with ParquetTest with TestHiveSingleton
     withTempPath { file =>
       sql("SELECT * FROM t LIMIT 1").write.parquet(file.getCanonicalPath)
       spark.read.parquet(file.getCanonicalPath).createOrReplaceTempView("p")
-      withTempTable("p") {
+      withTempView("p") {
         // let's do three overwrites for good measure
         sql("INSERT OVERWRITE TABLE p SELECT * FROM t")
         sql("INSERT OVERWRITE TABLE p SELECT * FROM t")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index 12d250d4fb..af071f95e6 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -79,7 +79,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
            |)
          """.stripMargin)
 
-      withTempTable("expectedJsonTable") {
+      withTempView("expectedJsonTable") {
         read.json(jsonFilePath).createOrReplaceTempView("expectedJsonTable")
         checkAnswer(
           sql("SELECT a, b, `c_!@(3)`, ``.`d!`, ``.`=` FROM jsonTable"),
@@ -109,7 +109,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
 
       assert(expectedSchema === table("jsonTable").schema)
 
-      withTempTable("expectedJsonTable") {
+      withTempView("expectedJsonTable") {
         read.json(jsonFilePath).createOrReplaceTempView("expectedJsonTable")
         checkAnswer(
           sql("SELECT b, ``.`=` FROM jsonTable"),
@@ -247,7 +247,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
            |)
          """.stripMargin)
 
-      withTempTable("expectedJsonTable") {
+      withTempView("expectedJsonTable") {
         read.json(jsonFilePath).createOrReplaceTempView("expectedJsonTable")
 
         checkAnswer(
@@ -553,7 +553,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
 
   test("scan a parquet table created through a CTAS statement") {
     withSQLConf(HiveUtils.CONVERT_METASTORE_PARQUET.key -> "true") {
-      withTempTable("jt") {
+      withTempView("jt") {
         (1 to 10).map(i => i -> s"str$i").toDF("a", "b").createOrReplaceTempView("jt")
 
         withTable("test_parquet_ctas") {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala
index 2b576469e9..14266e6847 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala
@@ -52,7 +52,7 @@ class ParquetHiveCompatibilitySuite extends ParquetCompatibilityTest with TestHi
 
     // Don't convert Hive metastore Parquet tables to let Hive write those Parquet files.
     withSQLConf(HiveUtils.CONVERT_METASTORE_PARQUET.key -> "false") {
-      withTempTable("data") {
+      withTempView("data") {
        val fields = hiveTypes.zipWithIndex.map { case (typ, index) => s" col_$index $typ" }
 
        val ddl =
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
index a16fe3228b..2dcf13c02a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
@@ -923,7 +923,7 @@ abstract class AggregationQuerySuite extends QueryTest with SQLTestUtils with Te
   }
 
   test("udaf without specifying inputSchema") {
-    withTempTable("noInputSchemaUDAF") {
+    withTempView("noInputSchemaUDAF") {
       spark.udf.register("noInputSchema", new ScalaAggregateFunctionWithoutInputSchema)
 
       val data =
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index 741abcb751..5d510197c4 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -139,7 +139,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
   }
 
   test("show tblproperties for spark temporary table - empty row") {
-    withTempTable("parquet_temp") {
+    withTempView("parquet_temp") {
       sql(
         """
          |CREATE TEMPORARY TABLE parquet_temp (c1 INT, c2 STRING)
@@ -397,7 +397,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
   }
 
   test("show partitions - empty row") {
-    withTempTable("parquet_temp") {
+    withTempView("parquet_temp") {
       sql(
         """
          |CREATE TEMPORARY TABLE parquet_temp (c1 INT, c2 STRING)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
index a43eed9a2a..98afd99a20 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
@@ -78,7 +78,7 @@ class HiveExplainSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
   }
 
   test("SPARK-6212: The EXPLAIN output of CTAS only shows the analyzed plan") {
-    withTempTable("jt") {
+    withTempView("jt") {
       val rdd = sparkContext.parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str$i"}"""))
       spark.read.json(rdd).createOrReplaceTempView("jt")
       val outputs = sql(
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
index 76d3f3dbab..5b464764f0 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
@@ -102,7 +102,7 @@ class HiveTableScanSuite extends HiveComparisonTest with SQLTestUtils with TestH
 
   test("Verify SQLConf HIVE_METASTORE_PARTITION_PRUNING") {
     val view = "src"
-    withTempTable(view) {
+    withTempView(view) {
       spark.range(1, 5).createOrReplaceTempView(view)
       val table = "table_with_partition"
       withTable(table) {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
index def4601cf6..f690035c84 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
@@ -358,7 +358,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
   }
 
   test("Hive UDF in group by") {
-    withTempTable("tab1") {
+    withTempView("tab1") {
       Seq(Tuple1(1451400761)).toDF("test_date").createOrReplaceTempView("tab1")
       sql(s"CREATE TEMPORARY FUNCTION testUDFToDate AS '${classOf[GenericUDFToDate].getName}'")
       val count = sql("select testUDFToDate(cast(test_date as timestamp))" +
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index cb8f79982b..688260d27c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -122,7 +122,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   test("SPARK-13651: generator outputs shouldn't be resolved from its child's output") {
-    withTempTable("src") {
+    withTempView("src") {
       Seq(("id1", "value1")).toDF("key", "value").createOrReplaceTempView("src")
 
       val query = sql("SELECT genoutput.* FROM src " +
@@ -952,7 +952,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   test("Sorting columns are not in Generate") {
-    withTempTable("data") {
+    withTempView("data") {
       spark.range(1, 5)
         .select(array($"id", $"id" + 1).as("a"), $"id".as("b"), (lit(10) - $"id").as("c"))
         .createOrReplaceTempView("data")
@@ -1229,7 +1229,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
 
   test("SPARK-10741: Sort on Aggregate using parquet") {
     withTable("test10741") {
-      withTempTable("src") {
+      withTempView("src") {
         Seq("a" -> 5, "a" -> 9, "b" -> 6).toDF("c1", "c2").createOrReplaceTempView("src")
         sql("CREATE TABLE test10741 STORED AS PARQUET AS SELECT * FROM src")
       }
@@ -1483,7 +1483,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   test("multi-insert with lateral view") {
-    withTempTable("t1") {
+    withTempView("t1") {
       spark.range(10)
         .select(array($"id", $"id" + 1).as("arr"), $"id")
         .createOrReplaceTempView("source")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
index b9e98fc85f..49e963ee12 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
@@ -93,7 +93,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
   test("Creating case class RDD table") {
     val data = (1 to 100).map(i => (i, s"val_$i"))
     sparkContext.parallelize(data).toDF().createOrReplaceTempView("t")
-    withTempTable("t") {
+    withTempView("t") {
       checkAnswer(sql("SELECT * FROM t"), data.toDF().collect())
     }
   }
@@ -310,7 +310,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
       val path = dir.getCanonicalPath
 
       withTable("empty_orc") {
-        withTempTable("empty", "single") {
+        withTempView("empty", "single") {
           spark.sql(
             s"""CREATE TABLE empty_orc(key INT, value STRING)
               |STORED AS ORC
@@ -402,7 +402,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
   }
 
   test("Verify the ORC conversion parameter: CONVERT_METASTORE_ORC") {
-    withTempTable("single") {
+    withTempView("single") {
       val singleRowDF = Seq((0, "foo")).toDF("key", "value")
       singleRowDF.createOrReplaceTempView("single")
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcTest.scala
index 2a647115b7..7226ed521e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcTest.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcTest.scala
@@ -62,7 +62,7 @@ private[sql] trait OrcTest extends SQLTestUtils with TestHiveSingleton {
       (f: => Unit): Unit = {
     withOrcDataFrame(data) { df =>
       df.createOrReplaceTempView(tableName)
-      withTempTable(tableName)(f)
+      withTempView(tableName)(f)
     }
   }
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
index 96beb2d342..31b6197d56 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
@@ -702,7 +702,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
   }
 
   test("Verify the PARQUET conversion parameter: CONVERT_METASTORE_PARQUET") {
-    withTempTable("single") {
+    withTempView("single") {
       val singleRowDF = Seq((0, "foo")).toDF("key", "value")
       singleRowDF.createOrReplaceTempView("single")
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
index 62998572ea..047b08c4cc 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
@@ -92,7 +92,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
 
     // Self-join
     df.createOrReplaceTempView("t")
-    withTempTable("t") {
+    withTempView("t") {
       checkAnswer(
         sql(
           """SELECT l.a, r.b, l.p1, r.p2
@@ -339,7 +339,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
 
   test("saveAsTable()/load() - non-partitioned table - ErrorIfExists") {
     Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
-    withTempTable("t") {
+    withTempView("t") {
       intercept[AnalysisException] {
         testDF.write.format(dataSourceName).mode(SaveMode.ErrorIfExists).saveAsTable("t")
       }
@@ -349,7 +349,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
 
Ignore") { Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t") - withTempTable("t") { + withTempView("t") { testDF.write.format(dataSourceName).mode(SaveMode.Ignore).saveAsTable("t") assert(spark.table("t").collect().isEmpty) } @@ -461,7 +461,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes test("saveAsTable()/load() - partitioned table - ErrorIfExists") { Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t") - withTempTable("t") { + withTempView("t") { intercept[AnalysisException] { partitionedTestDF.write .format(dataSourceName) @@ -476,7 +476,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes test("saveAsTable()/load() - partitioned table - Ignore") { Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t") - withTempTable("t") { + withTempView("t") { partitionedTestDF.write .format(dataSourceName) .mode(SaveMode.Ignore) @@ -722,7 +722,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes 'p3.cast(FloatType).as('pf1), 'f) - withTempTable("t") { + withTempView("t") { input .write .format(dataSourceName) -- cgit v1.2.3