From 831a04f5d152d1839c0edfdf65bb728aa5957f16 Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun
Date: Wed, 29 Jun 2016 17:29:17 -0700
Subject: [SPARK-16267][TEST] Replace deprecated `CREATE TEMPORARY TABLE ... USING` from testsuites.

## What changes were proposed in this pull request?

After SPARK-15674, `DDLStrategy` prints out deprecation messages like the following in the testsuites:

```
12:10:53.284 WARN org.apache.spark.sql.execution.SparkStrategies$DDLStrategy: CREATE TEMPORARY TABLE normal_orc_source USING... is deprecated, please use CREATE TEMPORARY VIEW viewName USING... instead
```

Total: 40
- JDBCWriteSuite: 14
- DDLSuite: 6
- TableScanSuite: 6
- ParquetSourceSuite: 5
- OrcSourceSuite: 2
- SQLQuerySuite: 2
- HiveCommandSuite: 2
- JsonSuite: 1
- PrunedScanSuite: 1
- FilteredScanSuite: 1

This PR replaces `CREATE TEMPORARY TABLE` with `CREATE TEMPORARY VIEW` in order to remove the deprecation messages from the testsuites above, except for `DDLSuite`, `SQLQuerySuite`, and `HiveCommandSuite`; a minimal before/after sketch is included after the `---` separator below. The Jenkins result shows only the 10 remaining messages.

https://amplab.cs.berkeley.edu/jenkins/job/SparkPullRequestBuilder/61422/consoleFull

## How was this patch tested?

This is a testsuite-only change.

Author: Dongjoon Hyun

Closes #13956 from dongjoon-hyun/SPARK-16267.
---
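Note (not part of the commit message or the diff): the sketch below illustrates the deprecated syntax and its replacement in a self-contained form. It assumes the Spark 2.0 `SparkSession` API; the `people` view name, the sample rows, and the temporary JSON path are illustrative only and are not taken from the patch.

```scala
import org.apache.spark.sql.SparkSession

object TempViewSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("temp-view-sketch")
      .getOrCreate()
    import spark.implicits._

    // Write a tiny JSON dataset so the data source below has something to read.
    val path = java.nio.file.Files.createTempDirectory("temp-view-sketch")
      .resolve("people").toString
    Seq(("alice", 29), ("bob", 31)).toDF("name", "age").write.json(path)

    // Deprecated form, which triggers the DDLStrategy warning quoted above:
    //   CREATE TEMPORARY TABLE people USING org.apache.spark.sql.json OPTIONS (path '...')
    // Replacement pattern used throughout the touched testsuites:
    spark.sql(
      s"""
         |CREATE OR REPLACE TEMPORARY VIEW people
         |USING org.apache.spark.sql.json
         |OPTIONS (path '$path')
       """.stripMargin)

    spark.sql("SELECT name, age FROM people").show()
    spark.stop()
  }
}
```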
 .../spark/sql/execution/datasources/json/JsonSuite.scala     |  2 +-
 .../scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala     |  4 ++--
 .../org/apache/spark/sql/sources/FilteredScanSuite.scala     |  2 +-
 .../scala/org/apache/spark/sql/sources/PrunedScanSuite.scala |  2 +-
 .../scala/org/apache/spark/sql/sources/TableScanSuite.scala  | 12 ++++++------
 .../scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala |  4 ++--
 .../test/scala/org/apache/spark/sql/hive/parquetSuites.scala | 10 +++++-----
 7 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
index 9f35c02d48..6c72019702 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
@@ -847,7 +847,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
     sql(
       s"""
-        |CREATE TEMPORARY TABLE jsonTableSQL
+        |CREATE TEMPORARY VIEW jsonTableSQL
         |USING org.apache.spark.sql.json
         |OPTIONS (
         |  path '$path'
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
index 48fa5f9822..ff66f53fcf 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
@@ -57,14 +57,14 @@ class JDBCWriteSuite extends SharedSQLContext with BeforeAndAfter {
     sql(
       s"""
-        |CREATE TEMPORARY TABLE PEOPLE
+        |CREATE OR REPLACE TEMPORARY VIEW PEOPLE
         |USING org.apache.spark.sql.jdbc
         |OPTIONS (url '$url1', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass')
       """.stripMargin.replaceAll("\n", " "))

     sql(
       s"""
-        |CREATE TEMPORARY TABLE PEOPLE1
+        |CREATE OR REPLACE TEMPORARY VIEW PEOPLE1
         |USING org.apache.spark.sql.jdbc
         |OPTIONS (url '$url1', dbtable 'TEST.PEOPLE1', user 'testUser', password 'testPass')
       """.stripMargin.replaceAll("\n", " "))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/FilteredScanSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/FilteredScanSuite.scala
index 45e737f5ed..be56c964a1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/FilteredScanSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/FilteredScanSuite.scala
@@ -139,7 +139,7 @@ class FilteredScanSuite extends DataSourceTest with SharedSQLContext with Predic
     super.beforeAll()
     sql(
       """
-        |CREATE TEMPORARY TABLE oneToTenFiltered
+        |CREATE TEMPORARY VIEW oneToTenFiltered
         |USING org.apache.spark.sql.sources.FilteredScanSource
         |OPTIONS (
         |  from '1',
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/PrunedScanSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/PrunedScanSuite.scala
index 207f89d3ea..fb6123d1cc 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/PrunedScanSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/PrunedScanSuite.scala
@@ -62,7 +62,7 @@ class PrunedScanSuite extends DataSourceTest with SharedSQLContext {
     super.beforeAll()
     sql(
       """
-        |CREATE TEMPORARY TABLE oneToTenPruned
+        |CREATE TEMPORARY VIEW oneToTenPruned
         |USING org.apache.spark.sql.sources.PrunedScanSource
         |OPTIONS (
         |  from '1',
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala
index d486fa8f33..e8fed039fa 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala
@@ -137,7 +137,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {
     super.beforeAll()
     sql(
       """
-        |CREATE TEMPORARY TABLE oneToTen
+        |CREATE TEMPORARY VIEW oneToTen
         |USING org.apache.spark.sql.sources.SimpleScanSource
         |OPTIONS (
         |  From '1',
@@ -149,7 +149,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {

     sql(
       """
-        |CREATE TEMPORARY TABLE tableWithSchema (
+        |CREATE TEMPORARY VIEW tableWithSchema (
         |`string$%Field` stRIng,
         |binaryField binary,
         |`booleanField` boolean,
@@ -332,7 +332,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {
   test("defaultSource") {
     sql(
       """
-        |CREATE TEMPORARY TABLE oneToTenDef
+        |CREATE TEMPORARY VIEW oneToTenDef
        |USING org.apache.spark.sql.sources
        |OPTIONS (
        |  from '1',
@@ -351,7 +351,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {
     val schemaNotAllowed = intercept[Exception] {
       sql(
         """
-          |CREATE TEMPORARY TABLE relationProvierWithSchema (i int)
+          |CREATE TEMPORARY VIEW relationProvierWithSchema (i int)
          |USING org.apache.spark.sql.sources.SimpleScanSource
          |OPTIONS (
          |  From '1',
@@ -364,7 +364,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {
     val schemaNeeded = intercept[Exception] {
       sql(
         """
-          |CREATE TEMPORARY TABLE schemaRelationProvierWithoutSchema
+          |CREATE TEMPORARY VIEW schemaRelationProvierWithoutSchema
          |USING org.apache.spark.sql.sources.AllDataTypesScanSource
          |OPTIONS (
          |  From '1',
@@ -378,7 +378,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {
   test("SPARK-5196 schema field with comment") {
     sql(
       """
-        |CREATE TEMPORARY TABLE student(name string comment "SN", age int comment "SA", grade int)
+        |CREATE TEMPORARY VIEW student(name string comment "SN", age int comment "SA", grade int)
        |USING org.apache.spark.sql.sources.AllDataTypesScanSource
        |OPTIONS (
        |  from '1',
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
index 871b9e02eb..0f37cd7bf3 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
@@ -153,7 +153,7 @@ class OrcSourceSuite extends OrcSuite {
     super.beforeAll()

     spark.sql(
-      s"""CREATE TEMPORARY TABLE normal_orc_source
+      s"""CREATE TEMPORARY VIEW normal_orc_source
          |USING org.apache.spark.sql.hive.orc
          |OPTIONS (
          |  PATH '${new File(orcTableAsDir.getAbsolutePath).getCanonicalPath}'
@@ -161,7 +161,7 @@ class OrcSourceSuite extends OrcSuite {
       """.stripMargin)

     spark.sql(
-      s"""CREATE TEMPORARY TABLE normal_orc_as_source
+      s"""CREATE TEMPORARY VIEW normal_orc_as_source
          |USING org.apache.spark.sql.hive.orc
          |OPTIONS (
          |  PATH '${new File(orcTableAsDir.getAbsolutePath).getCanonicalPath}'
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
index 6af9976ea0..fe7253d735 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
@@ -582,7 +582,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
       "normal_parquet")

     sql( s"""
-      create temporary table partitioned_parquet
+      CREATE TEMPORARY VIEW partitioned_parquet
       USING org.apache.spark.sql.parquet
       OPTIONS (
         path '${partitionedTableDir.getCanonicalPath}'
@@ -590,7 +590,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
     """)

     sql( s"""
-      create temporary table partitioned_parquet_with_key
+      CREATE TEMPORARY VIEW partitioned_parquet_with_key
       USING org.apache.spark.sql.parquet
       OPTIONS (
         path '${partitionedTableDirWithKey.getCanonicalPath}'
@@ -598,7 +598,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
     """)

     sql( s"""
-      create temporary table normal_parquet
+      CREATE TEMPORARY VIEW normal_parquet
       USING org.apache.spark.sql.parquet
       OPTIONS (
         path '${new File(partitionedTableDir, "p=1").getCanonicalPath}'
@@ -606,7 +606,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
     """)

     sql( s"""
-      CREATE TEMPORARY TABLE partitioned_parquet_with_key_and_complextypes
+      CREATE TEMPORARY VIEW partitioned_parquet_with_key_and_complextypes
       USING org.apache.spark.sql.parquet
       OPTIONS (
         path '${partitionedTableDirWithKeyAndComplexTypes.getCanonicalPath}'
@@ -614,7 +614,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
     """)

     sql( s"""
-      CREATE TEMPORARY TABLE partitioned_parquet_with_complextypes
+      CREATE TEMPORARY VIEW partitioned_parquet_with_complextypes
       USING org.apache.spark.sql.parquet
       OPTIONS (
         path '${partitionedTableDirWithComplexTypes.getCanonicalPath}'
--
cgit v1.2.3