Diffstat (limited to 'sql/hive')
 sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala |  4 ++--
 sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala      | 10 +++++-----
 2 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
index 871b9e02eb..0f37cd7bf3 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
@@ -153,7 +153,7 @@ class OrcSourceSuite extends OrcSuite {
     super.beforeAll()
 
     spark.sql(
-      s"""CREATE TEMPORARY TABLE normal_orc_source
+      s"""CREATE TEMPORARY VIEW normal_orc_source
          |USING org.apache.spark.sql.hive.orc
          |OPTIONS (
          |  PATH '${new File(orcTableAsDir.getAbsolutePath).getCanonicalPath}'
@@ -161,7 +161,7 @@ class OrcSourceSuite extends OrcSuite {
        """.stripMargin)
 
     spark.sql(
-      s"""CREATE TEMPORARY TABLE normal_orc_as_source
+      s"""CREATE TEMPORARY VIEW normal_orc_as_source
          |USING org.apache.spark.sql.hive.orc
          |OPTIONS (
          |  PATH '${new File(orcTableAsDir.getAbsolutePath).getCanonicalPath}'
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
index 6af9976ea0..fe7253d735 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
@@ -582,7 +582,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
       "normal_parquet")
 
     sql( s"""
-      create temporary table partitioned_parquet
+      CREATE TEMPORARY VIEW partitioned_parquet
       USING org.apache.spark.sql.parquet
       OPTIONS (
         path '${partitionedTableDir.getCanonicalPath}'
@@ -590,7 +590,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
     """)
 
     sql( s"""
-      create temporary table partitioned_parquet_with_key
+      CREATE TEMPORARY VIEW partitioned_parquet_with_key
       USING org.apache.spark.sql.parquet
       OPTIONS (
         path '${partitionedTableDirWithKey.getCanonicalPath}'
@@ -598,7 +598,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
     """)
 
     sql( s"""
-      create temporary table normal_parquet
+      CREATE TEMPORARY VIEW normal_parquet
       USING org.apache.spark.sql.parquet
       OPTIONS (
         path '${new File(partitionedTableDir, "p=1").getCanonicalPath}'
@@ -606,7 +606,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
     """)
 
     sql( s"""
-      CREATE TEMPORARY TABLE partitioned_parquet_with_key_and_complextypes
+      CREATE TEMPORARY VIEW partitioned_parquet_with_key_and_complextypes
       USING org.apache.spark.sql.parquet
       OPTIONS (
         path '${partitionedTableDirWithKeyAndComplexTypes.getCanonicalPath}'
@@ -614,7 +614,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
     """)
 
     sql( s"""
-      CREATE TEMPORARY TABLE partitioned_parquet_with_complextypes
+      CREATE TEMPORARY VIEW partitioned_parquet_with_complextypes
       USING org.apache.spark.sql.parquet
       OPTIONS (
         path '${partitionedTableDirWithComplexTypes.getCanonicalPath}'
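
For reference, a minimal sketch of the data-source temporary-view syntax these suites switch to, assuming an active SparkSession named spark; the view name and path below are illustrative placeholders, not values taken from this diff:

// Illustrative sketch (not part of this diff): register a session-scoped
// temporary view over an ORC directory using the data-source syntax shown above.
// The view name and PATH value are placeholders.
spark.sql(
  s"""CREATE TEMPORARY VIEW example_orc_source
     |USING org.apache.spark.sql.hive.orc
     |OPTIONS (
     |  PATH '/tmp/example/orc'
     |)
   """.stripMargin)

Both the old CREATE TEMPORARY TABLE ... USING form and the CREATE TEMPORARY VIEW ... USING form register a session-scoped relation backed by the files at PATH; only the keyword changes in these tests.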