author    Sean Zhong <seanzhong@databricks.com>    2016-05-18 09:01:59 +0800
committer Cheng Lian <lian@databricks.com>         2016-05-18 09:01:59 +0800
commit    25b315e6cad7c27b62dcaa2c194293c1115fdfb3 (patch)
tree      cfeebcaf553d78ca80a70f7139a765e7759f0410 /sql/hive/src/test
parent    b674e67c22bf663334e537e35787c00533adbb04 (diff)
[SPARK-15171][SQL] Remove the references to deprecated method dataset.registerTempTable
## What changes were proposed in this pull request?

Update the unit test code, examples, and documentation to remove calls to the deprecated method `dataset.registerTempTable`.

## How was this patch tested?

This PR only changes unit test code, examples, and comments, so it should be safe. It is a follow-up to PR https://github.com/apache/spark/pull/12945, which has already been merged.

Author: Sean Zhong <seanzhong@databricks.com>

Closes #13098 from clockfly/spark-15171-remove-deprecation.
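The change itself is mechanical: `registerTempTable(name)` was deprecated in Spark 2.0 in favor of `createOrReplaceTempView(name)`, which behaves the same but makes explicit that it registers a session-scoped view over the Dataset rather than materializing a table. A minimal before/after sketch (the local `SparkSession` setup and the view name `people` are illustrative, not taken from this patch):

```scala
import org.apache.spark.sql.SparkSession

object TempViewMigration {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("temp-view-migration")
      .getOrCreate()
    import spark.implicits._

    val df = Seq((1, "str1"), (2, "str2")).toDF("key", "value")

    // Before (deprecated since Spark 2.0, emits a deprecation warning):
    // df.registerTempTable("people")

    // After: registers, or replaces, a temporary view scoped to this session.
    df.createOrReplaceTempView("people")

    spark.sql("SELECT key, value FROM people WHERE key > 1").show()
    spark.stop()
  }
}
```

Because each call site changes one-for-one, the diff below is almost perfectly balanced: 120 insertions against 119 deletions. The one extra insertion comes from a replacement in `parquetSuites.scala` whose longer method name pushed the line past the length limit, so it was wrapped onto a second line.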
Diffstat (limited to 'sql/hive/src/test')
 sql/hive/src/test/java/org/apache/spark/sql/hive/JavaDataFrameSuite.java | 2
 sql/hive/src/test/java/org/apache/spark/sql/hive/JavaMetastoreDataSourcesSuite.java | 2
 sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala | 4
 sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala | 4
 sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala | 8
 sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala | 12
 sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala | 8
 sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala | 2
 sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala | 2
 sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala | 2
 sql/hive/src/test/scala/org/apache/spark/sql/hive/UDFSuite.scala | 2
 sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala | 8
 sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala | 2
 sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveOperatorQueryableSuite.scala | 4
 sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HivePlanTest.scala | 2
 sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala | 18
 sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveResolutionSuite.scala | 10
 sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala | 2
 sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala | 22
 sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala | 66
 sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLWindowFunctionSuite.scala | 16
 sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcPartitionDiscoverySuite.scala | 8
 sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala | 12
 sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala | 2
 sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala | 9
 sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala | 10
26 files changed, 120 insertions, 119 deletions
diff --git a/sql/hive/src/test/java/org/apache/spark/sql/hive/JavaDataFrameSuite.java b/sql/hive/src/test/java/org/apache/spark/sql/hive/JavaDataFrameSuite.java
index 64f2ded447..f664d5a4cd 100644
--- a/sql/hive/src/test/java/org/apache/spark/sql/hive/JavaDataFrameSuite.java
+++ b/sql/hive/src/test/java/org/apache/spark/sql/hive/JavaDataFrameSuite.java
@@ -57,7 +57,7 @@ public class JavaDataFrameSuite {
jsonObjects.add("{\"key\":" + i + ", \"value\":\"str" + i + "\"}");
}
df = hc.read().json(sc.parallelize(jsonObjects));
- df.registerTempTable("window_table");
+ df.createOrReplaceTempView("window_table");
}
@After
diff --git a/sql/hive/src/test/java/org/apache/spark/sql/hive/JavaMetastoreDataSourcesSuite.java b/sql/hive/src/test/java/org/apache/spark/sql/hive/JavaMetastoreDataSourcesSuite.java
index f13c32db9d..e73117c814 100644
--- a/sql/hive/src/test/java/org/apache/spark/sql/hive/JavaMetastoreDataSourcesSuite.java
+++ b/sql/hive/src/test/java/org/apache/spark/sql/hive/JavaMetastoreDataSourcesSuite.java
@@ -85,7 +85,7 @@ public class JavaMetastoreDataSourcesSuite {
}
JavaRDD<String> rdd = sc.parallelize(jsonObjects);
df = sqlContext.read().json(rdd);
- df.registerTempTable("jsonTable");
+ df.createOrReplaceTempView("jsonTable");
}
@After
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala
index d96eb0169e..d2cb62c617 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala
@@ -33,8 +33,8 @@ class ErrorPositionSuite extends QueryTest with TestHiveSingleton with BeforeAnd
if (spark.wrapped.tableNames().contains("src")) {
spark.catalog.dropTempView("src")
}
- Seq((1, "")).toDF("key", "value").registerTempTable("src")
- Seq((1, 1, 1)).toDF("a", "a", "b").registerTempTable("dupAttributes")
+ Seq((1, "")).toDF("key", "value").createOrReplaceTempView("src")
+ Seq((1, 1, 1)).toDF("a", "a", "b").createOrReplaceTempView("dupAttributes")
}
override protected def afterEach(): Unit = {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala
index b5af758a65..e2304b5397 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala
@@ -51,7 +51,7 @@ class HiveParquetSuite extends QueryTest with ParquetTest with TestHiveSingleton
test("Converting Hive to Parquet Table via saveAsParquetFile") {
withTempPath { dir =>
sql("SELECT * FROM src").write.parquet(dir.getCanonicalPath)
- hiveContext.read.parquet(dir.getCanonicalPath).registerTempTable("p")
+ hiveContext.read.parquet(dir.getCanonicalPath).createOrReplaceTempView("p")
withTempTable("p") {
checkAnswer(
sql("SELECT * FROM src ORDER BY key"),
@@ -65,7 +65,7 @@ class HiveParquetSuite extends QueryTest with ParquetTest with TestHiveSingleton
withParquetTable((1 to 10).map(i => (i, s"val_$i")), "t", false) {
withTempPath { file =>
sql("SELECT * FROM t LIMIT 1").write.parquet(file.getCanonicalPath)
- hiveContext.read.parquet(file.getCanonicalPath).registerTempTable("p")
+ hiveContext.read.parquet(file.getCanonicalPath).createOrReplaceTempView("p")
withTempTable("p") {
// let's do three overwrites for good measure
sql("INSERT OVERWRITE TABLE p SELECT * FROM t")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index d05a3623ae..a4bbe96cf8 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -355,7 +355,7 @@ object TemporaryHiveUDFTest extends Logging {
""".stripMargin)
val source =
hiveContext.createDataFrame((1 to 10).map(i => (i, s"str$i"))).toDF("key", "val")
- source.registerTempTable("sourceTable")
+ source.createOrReplaceTempView("sourceTable")
// Actually use the loaded UDF.
logInfo("Using the UDF.")
val result = hiveContext.sql(
@@ -393,7 +393,7 @@ object PermanentHiveUDFTest1 extends Logging {
""".stripMargin)
val source =
hiveContext.createDataFrame((1 to 10).map(i => (i, s"str$i"))).toDF("key", "val")
- source.registerTempTable("sourceTable")
+ source.createOrReplaceTempView("sourceTable")
// Actually use the loaded UDF.
logInfo("Using the UDF.")
val result = hiveContext.sql(
@@ -429,7 +429,7 @@ object PermanentHiveUDFTest2 extends Logging {
hiveContext.sessionState.catalog.createFunction(function, ignoreIfExists = false)
val source =
hiveContext.createDataFrame((1 to 10).map(i => (i, s"str$i"))).toDF("key", "val")
- source.registerTempTable("sourceTable")
+ source.createOrReplaceTempView("sourceTable")
// Actually use the loaded UDF.
logInfo("Using the UDF.")
val result = hiveContext.sql(
@@ -491,7 +491,7 @@ object SparkSubmitClassLoaderTest extends Logging {
""".stripMargin)
val source =
hiveContext.createDataFrame((1 to 10).map(i => (i, s"str$i"))).toDF("key", "val")
- source.registerTempTable("sourceTable")
+ source.createOrReplaceTempView("sourceTable")
// Load a Hive SerDe from the jar.
logInfo("Creating a Hive table with a SerDe provided in a jar.")
hiveContext.sql(
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
index 883cdac110..b256845620 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
@@ -45,8 +45,8 @@ class InsertIntoHiveTableSuite extends QueryTest with TestHiveSingleton with Bef
// Since every we are doing tests for DDL statements,
// it is better to reset before every test.
hiveContext.reset()
- // Register the testData, which will be used in every test.
- testData.registerTempTable("testData")
+ // Creates a temporary view with testData, which will be used in all tests.
+ testData.createOrReplaceTempView("testData")
}
test("insertInto() HiveTable") {
@@ -98,7 +98,7 @@ class InsertIntoHiveTableSuite extends QueryTest with TestHiveSingleton with Bef
val rowRDD = hiveContext.sparkContext.parallelize(
(1 to 100).map(i => Row(scala.collection.mutable.HashMap(s"key$i" -> s"value$i"))))
val df = hiveContext.createDataFrame(rowRDD, schema)
- df.registerTempTable("tableWithMapValue")
+ df.createOrReplaceTempView("tableWithMapValue")
sql("CREATE TABLE hiveTableWithMapValue(m MAP <STRING, STRING>)")
sql("INSERT OVERWRITE TABLE hiveTableWithMapValue SELECT m FROM tableWithMapValue")
@@ -171,7 +171,7 @@ class InsertIntoHiveTableSuite extends QueryTest with TestHiveSingleton with Bef
StructField("a", ArrayType(StringType, containsNull = false))))
val rowRDD = hiveContext.sparkContext.parallelize((1 to 100).map(i => Row(Seq(s"value$i"))))
val df = hiveContext.createDataFrame(rowRDD, schema)
- df.registerTempTable("tableWithArrayValue")
+ df.createOrReplaceTempView("tableWithArrayValue")
sql("CREATE TABLE hiveTableWithArrayValue(a Array <STRING>)")
sql("INSERT OVERWRITE TABLE hiveTableWithArrayValue SELECT a FROM tableWithArrayValue")
@@ -188,7 +188,7 @@ class InsertIntoHiveTableSuite extends QueryTest with TestHiveSingleton with Bef
val rowRDD = hiveContext.sparkContext.parallelize(
(1 to 100).map(i => Row(Map(s"key$i" -> s"value$i"))))
val df = hiveContext.createDataFrame(rowRDD, schema)
- df.registerTempTable("tableWithMapValue")
+ df.createOrReplaceTempView("tableWithMapValue")
sql("CREATE TABLE hiveTableWithMapValue(m Map <STRING, STRING>)")
sql("INSERT OVERWRITE TABLE hiveTableWithMapValue SELECT m FROM tableWithMapValue")
@@ -205,7 +205,7 @@ class InsertIntoHiveTableSuite extends QueryTest with TestHiveSingleton with Bef
val rowRDD = hiveContext.sparkContext.parallelize(
(1 to 100).map(i => Row(Row(s"value$i"))))
val df = hiveContext.createDataFrame(rowRDD, schema)
- df.registerTempTable("tableWithStructValue")
+ df.createOrReplaceTempView("tableWithStructValue")
sql("CREATE TABLE hiveTableWithStructValue(s Struct <f: STRING>)")
sql("INSERT OVERWRITE TABLE hiveTableWithStructValue SELECT s FROM tableWithStructValue")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index b507018e58..00adb9a44b 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -80,7 +80,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
""".stripMargin)
withTempTable("expectedJsonTable") {
- read.json(jsonFilePath).registerTempTable("expectedJsonTable")
+ read.json(jsonFilePath).createOrReplaceTempView("expectedJsonTable")
checkAnswer(
sql("SELECT a, b, `c_!@(3)`, `<d>`.`d!`, `<d>`.`=` FROM jsonTable"),
sql("SELECT a, b, `c_!@(3)`, `<d>`.`d!`, `<d>`.`=` FROM expectedJsonTable"))
@@ -110,7 +110,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
assert(expectedSchema === table("jsonTable").schema)
withTempTable("expectedJsonTable") {
- read.json(jsonFilePath).registerTempTable("expectedJsonTable")
+ read.json(jsonFilePath).createOrReplaceTempView("expectedJsonTable")
checkAnswer(
sql("SELECT b, `<d>`.`=` FROM jsonTable"),
sql("SELECT b, `<d>`.`=` FROM expectedJsonTable"))
@@ -248,7 +248,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
""".stripMargin)
withTempTable("expectedJsonTable") {
- read.json(jsonFilePath).registerTempTable("expectedJsonTable")
+ read.json(jsonFilePath).createOrReplaceTempView("expectedJsonTable")
checkAnswer(
sql("SELECT * FROM jsonTable"),
@@ -554,7 +554,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
test("scan a parquet table created through a CTAS statement") {
withSQLConf(HiveUtils.CONVERT_METASTORE_PARQUET.key -> "true") {
withTempTable("jt") {
- (1 to 10).map(i => i -> s"str$i").toDF("a", "b").registerTempTable("jt")
+ (1 to 10).map(i => i -> s"str$i").toDF("a", "b").createOrReplaceTempView("jt")
withTable("test_parquet_ctas") {
sql(
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala
index 3f6418cbe8..ac89bbbf8e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala
@@ -74,7 +74,7 @@ class ParquetHiveCompatibilitySuite extends ParquetCompatibilityTest with TestHi
val schema = spark.table("parquet_compat").schema
val rowRDD = spark.sparkContext.parallelize(rows).coalesce(1)
- spark.createDataFrame(rowRDD, schema).registerTempTable("data")
+ spark.createDataFrame(rowRDD, schema).createOrReplaceTempView("data")
spark.sql("INSERT INTO TABLE parquet_compat SELECT * FROM data")
}
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala
index 78569c5808..cc05e56d66 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala
@@ -32,7 +32,7 @@ class QueryPartitionSuite extends QueryTest with SQLTestUtils with TestHiveSingl
withSQLConf((SQLConf.HIVE_VERIFY_PARTITION_PATH.key, "true")) {
val testData = sparkContext.parallelize(
(1 to 10).map(i => TestData(i, i.toString))).toDF()
- testData.registerTempTable("testData")
+ testData.createOrReplaceTempView("testData")
val tmpDir = Files.createTempDir()
// create the table for test
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
index 8060ef77e7..7011cd8122 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
@@ -115,7 +115,7 @@ class StatisticsSuite extends QueryTest with TestHiveSingleton {
sql("DROP TABLE analyzeTable_part").collect()
// Try to analyze a temp table
- sql("""SELECT * FROM src""").registerTempTable("tempTable")
+ sql("""SELECT * FROM src""").createOrReplaceTempView("tempTable")
intercept[UnsupportedOperationException] {
hiveContext.sql("ANALYZE TABLE tempTable COMPUTE STATISTICS")
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/UDFSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/UDFSuite.scala
index d1aa5aa931..d121bcbe15 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/UDFSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/UDFSuite.scala
@@ -53,7 +53,7 @@ class UDFSuite
sql("USE default")
testDF = (1 to 10).map(i => s"sTr$i").toDF("value")
- testDF.registerTempTable(testTableName)
+ testDF.createOrReplaceTempView(testTableName)
expectedDF = (1 to 10).map(i => s"STR$i").toDF("value")
super.beforeAll()
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
index c97b3f3197..a2bae2e81f 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
@@ -180,7 +180,7 @@ abstract class AggregationQuerySuite extends QueryTest with SQLTestUtils with Te
val emptyDF = spark.createDataFrame(
sparkContext.emptyRDD[Row],
StructType(StructField("key", StringType) :: StructField("value", IntegerType) :: Nil))
- emptyDF.registerTempTable("emptyTable")
+ emptyDF.createOrReplaceTempView("emptyTable")
// Register UDAFs
spark.udf.register("mydoublesum", new MyDoubleSum)
@@ -200,7 +200,7 @@ abstract class AggregationQuerySuite extends QueryTest with SQLTestUtils with Te
}
test("group by function") {
- Seq((1, 2)).toDF("a", "b").registerTempTable("data")
+ Seq((1, 2)).toDF("a", "b").createOrReplaceTempView("data")
checkAnswer(
sql("SELECT floor(a) AS a, collect_set(b) FROM data GROUP BY floor(a) ORDER BY a"),
@@ -783,7 +783,7 @@ abstract class AggregationQuerySuite extends QueryTest with SQLTestUtils with Te
(5, 8, 17),
(6, 2, 11)).toDF("a", "b", "c")
- covar_tab.registerTempTable("covar_tab")
+ covar_tab.createOrReplaceTempView("covar_tab")
checkAnswer(
spark.sql(
@@ -938,7 +938,7 @@ abstract class AggregationQuerySuite extends QueryTest with SQLTestUtils with Te
spark.createDataFrame(
sparkContext.parallelize(data, 2),
schema)
- .registerTempTable("noInputSchemaUDAF")
+ .createOrReplaceTempView("noInputSchemaUDAF")
checkAnswer(
spark.sql(
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
index 17422ca1a0..131b06aec8 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
@@ -80,7 +80,7 @@ class HiveExplainSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
test("SPARK-6212: The EXPLAIN output of CTAS only shows the analyzed plan") {
withTempTable("jt") {
val rdd = sparkContext.parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str$i"}"""))
- hiveContext.read.json(rdd).registerTempTable("jt")
+ hiveContext.read.json(rdd).createOrReplaceTempView("jt")
val outputs = sql(
s"""
|EXPLAIN EXTENDED
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveOperatorQueryableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveOperatorQueryableSuite.scala
index b252c6ee2f..4d2f190b8e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveOperatorQueryableSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveOperatorQueryableSuite.scala
@@ -29,8 +29,8 @@ class HiveOperatorQueryableSuite extends QueryTest with TestHiveSingleton {
test("SPARK-5324 query result of describe command") {
hiveContext.loadTestTable("src")
- // register a describe command to be a temp table
- sql("desc src").registerTempTable("mydesc")
+ // Creates a temporary view with the output of a describe command
+ sql("desc src").createOrReplaceTempView("mydesc")
checkAnswer(
sql("desc mydesc"),
Seq(
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HivePlanTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HivePlanTest.scala
index d8d3448add..78c0d1f97e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HivePlanTest.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HivePlanTest.scala
@@ -28,7 +28,7 @@ class HivePlanTest extends QueryTest with TestHiveSingleton {
import hiveContext.implicits._
test("udf constant folding") {
- Seq.empty[Tuple1[Int]].toDF("a").registerTempTable("t")
+ Seq.empty[Tuple1[Int]].toDF("a").createOrReplaceTempView("t")
val optimized = sql("SELECT cos(null) AS c FROM t").queryExecution.optimizedPlan
val correctAnswer = sql("SELECT cast(null as double) AS c FROM t").queryExecution.optimizedPlan
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index 19f8cb3877..2aaaaadb6a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -685,12 +685,12 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
createQueryTest("case sensitivity when query Hive table",
"SELECT srcalias.KEY, SRCALIAS.value FROM sRc SrCAlias WHERE SrCAlias.kEy < 15")
- test("case sensitivity: registered table") {
+ test("case sensitivity: created temporary view") {
val testData =
TestHive.sparkContext.parallelize(
TestData(1, "str1") ::
TestData(2, "str2") :: Nil)
- testData.toDF().registerTempTable("REGisteredTABle")
+ testData.toDF().createOrReplaceTempView("REGisteredTABle")
assertResult(Array(Row(2, "str2"))) {
sql("SELECT tablealias.A, TABLEALIAS.b FROM reGisteredTABle TableAlias " +
@@ -715,7 +715,7 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
test("SPARK-2180: HAVING support in GROUP BY clauses (positive)") {
val fixture = List(("foo", 2), ("bar", 1), ("foo", 4), ("bar", 3))
.zipWithIndex.map {case ((value, attr), key) => HavingRow(key, value, attr)}
- TestHive.sparkContext.parallelize(fixture).toDF().registerTempTable("having_test")
+ TestHive.sparkContext.parallelize(fixture).toDF().createOrReplaceTempView("having_test")
val results =
sql("SELECT value, max(attr) AS attr FROM having_test GROUP BY value HAVING attr > 3")
.collect()
@@ -819,12 +819,12 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
.collect()
}
- // Describe a registered temporary table.
+ // Describe a temporary view.
val testData =
TestHive.sparkContext.parallelize(
TestData(1, "str1") ::
TestData(1, "str2") :: Nil)
- testData.toDF().registerTempTable("test_describe_commands2")
+ testData.toDF().createOrReplaceTempView("test_describe_commands2")
assertResult(
Array(
@@ -996,9 +996,9 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
}
}
- test("SPARK-3414 regression: should store analyzed logical plan when registering a temp table") {
- sparkContext.makeRDD(Seq.empty[LogEntry]).toDF().registerTempTable("rawLogs")
- sparkContext.makeRDD(Seq.empty[LogFile]).toDF().registerTempTable("logFiles")
+ test("SPARK-3414 regression: should store analyzed logical plan when creating a temporary view") {
+ sparkContext.makeRDD(Seq.empty[LogEntry]).toDF().createOrReplaceTempView("rawLogs")
+ sparkContext.makeRDD(Seq.empty[LogFile]).toDF().createOrReplaceTempView("logFiles")
sql(
"""
@@ -1009,7 +1009,7 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
FROM logFiles
) files
ON rawLogs.filename = files.name
- """).registerTempTable("boom")
+ """).createOrReplaceTempView("boom")
// This should be successfully analyzed
sql("SELECT * FROM boom").queryExecution.analyzed
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveResolutionSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveResolutionSuite.scala
index dd13b83928..b2f19d7753 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveResolutionSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveResolutionSuite.scala
@@ -32,14 +32,14 @@ class HiveResolutionSuite extends HiveComparisonTest {
test("SPARK-3698: case insensitive test for nested data") {
read.json(sparkContext.makeRDD(
- """{"a": [{"a": {"a": 1}}]}""" :: Nil)).registerTempTable("nested")
+ """{"a": [{"a": {"a": 1}}]}""" :: Nil)).createOrReplaceTempView("nested")
// This should be successfully analyzed
sql("SELECT a[0].A.A from nested").queryExecution.analyzed
}
test("SPARK-5278: check ambiguous reference to fields") {
read.json(sparkContext.makeRDD(
- """{"a": [{"b": 1, "B": 2}]}""" :: Nil)).registerTempTable("nested")
+ """{"a": [{"b": 1, "B": 2}]}""" :: Nil)).createOrReplaceTempView("nested")
// there are 2 filed matching field name "b", we should report Ambiguous reference error
val exception = intercept[AnalysisException] {
@@ -78,7 +78,7 @@ class HiveResolutionSuite extends HiveComparisonTest {
test("case insensitivity with scala reflection") {
// Test resolution with Scala Reflection
sparkContext.parallelize(Data(1, 2, Nested(1, 2), Seq(Nested(1, 2))) :: Nil)
- .toDF().registerTempTable("caseSensitivityTest")
+ .toDF().createOrReplaceTempView("caseSensitivityTest")
val query = sql("SELECT a, b, A, B, n.a, n.b, n.A, n.B FROM caseSensitivityTest")
assert(query.schema.fields.map(_.name) === Seq("a", "b", "A", "B", "a", "b", "A", "B"),
@@ -89,14 +89,14 @@ class HiveResolutionSuite extends HiveComparisonTest {
ignore("case insensitivity with scala reflection joins") {
// Test resolution with Scala Reflection
sparkContext.parallelize(Data(1, 2, Nested(1, 2), Seq(Nested(1, 2))) :: Nil)
- .toDF().registerTempTable("caseSensitivityTest")
+ .toDF().createOrReplaceTempView("caseSensitivityTest")
sql("SELECT * FROM casesensitivitytest a JOIN casesensitivitytest b ON a.a = b.a").collect()
}
test("nested repeated resolution") {
sparkContext.parallelize(Data(1, 2, Nested(1, 2), Seq(Nested(1, 2))) :: Nil)
- .toDF().registerTempTable("nestedRepeatedTest")
+ .toDF().createOrReplaceTempView("nestedRepeatedTest")
assert(sql("SELECT nestedArray[0].a FROM nestedRepeatedTest").collect().head(0) === 1)
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
index 8c9c37fece..60f8be5e0e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
@@ -84,7 +84,7 @@ class HiveTableScanSuite extends HiveComparisonTest {
sql("""insert into table spark_4959 select "hi" from src limit 1""")
table("spark_4959").select(
'col1.as("CaseSensitiveColName"),
- 'col1.as("CaseSensitiveColName2")).registerTempTable("spark_4959_2")
+ 'col1.as("CaseSensitiveColName2")).createOrReplaceTempView("spark_4959_2")
assert(sql("select CaseSensitiveColName from spark_4959_2").head() === Row("hi"))
assert(sql("select casesensitivecolname from spark_4959_2").head() === Row("hi"))
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
index 521964eb4e..23b7f6c75b 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
@@ -153,7 +153,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
test("UDFIntegerToString") {
val testData = hiveContext.sparkContext.parallelize(
IntegerCaseClass(1) :: IntegerCaseClass(2) :: Nil).toDF()
- testData.registerTempTable("integerTable")
+ testData.createOrReplaceTempView("integerTable")
val udfName = classOf[UDFIntegerToString].getName
sql(s"CREATE TEMPORARY FUNCTION testUDFIntegerToString AS '$udfName'")
@@ -167,7 +167,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
test("UDFToListString") {
val testData = hiveContext.sparkContext.parallelize(StringCaseClass("") :: Nil).toDF()
- testData.registerTempTable("inputTable")
+ testData.createOrReplaceTempView("inputTable")
sql(s"CREATE TEMPORARY FUNCTION testUDFToListString AS '${classOf[UDFToListString].getName}'")
val errMsg = intercept[AnalysisException] {
@@ -182,7 +182,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
test("UDFToListInt") {
val testData = hiveContext.sparkContext.parallelize(StringCaseClass("") :: Nil).toDF()
- testData.registerTempTable("inputTable")
+ testData.createOrReplaceTempView("inputTable")
sql(s"CREATE TEMPORARY FUNCTION testUDFToListInt AS '${classOf[UDFToListInt].getName}'")
val errMsg = intercept[AnalysisException] {
@@ -197,7 +197,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
test("UDFToStringIntMap") {
val testData = hiveContext.sparkContext.parallelize(StringCaseClass("") :: Nil).toDF()
- testData.registerTempTable("inputTable")
+ testData.createOrReplaceTempView("inputTable")
sql(s"CREATE TEMPORARY FUNCTION testUDFToStringIntMap " +
s"AS '${classOf[UDFToStringIntMap].getName}'")
@@ -213,7 +213,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
test("UDFToIntIntMap") {
val testData = hiveContext.sparkContext.parallelize(StringCaseClass("") :: Nil).toDF()
- testData.registerTempTable("inputTable")
+ testData.createOrReplaceTempView("inputTable")
sql(s"CREATE TEMPORARY FUNCTION testUDFToIntIntMap " +
s"AS '${classOf[UDFToIntIntMap].getName}'")
@@ -232,7 +232,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
ListListIntCaseClass(Nil) ::
ListListIntCaseClass(Seq((1, 2, 3))) ::
ListListIntCaseClass(Seq((4, 5, 6), (7, 8, 9))) :: Nil).toDF()
- testData.registerTempTable("listListIntTable")
+ testData.createOrReplaceTempView("listListIntTable")
sql(s"CREATE TEMPORARY FUNCTION testUDFListListInt AS '${classOf[UDFListListInt].getName}'")
checkAnswer(
@@ -247,7 +247,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
val testData = hiveContext.sparkContext.parallelize(
ListStringCaseClass(Seq("a", "b", "c")) ::
ListStringCaseClass(Seq("d", "e")) :: Nil).toDF()
- testData.registerTempTable("listStringTable")
+ testData.createOrReplaceTempView("listStringTable")
sql(s"CREATE TEMPORARY FUNCTION testUDFListString AS '${classOf[UDFListString].getName}'")
checkAnswer(
@@ -261,7 +261,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
test("UDFStringString") {
val testData = hiveContext.sparkContext.parallelize(
StringCaseClass("world") :: StringCaseClass("goodbye") :: Nil).toDF()
- testData.registerTempTable("stringTable")
+ testData.createOrReplaceTempView("stringTable")
sql(s"CREATE TEMPORARY FUNCTION testStringStringUDF AS '${classOf[UDFStringString].getName}'")
checkAnswer(
@@ -283,7 +283,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
ListListIntCaseClass(Seq((1, 2, 3))) ::
ListListIntCaseClass(Seq((4, 5, 6), (7, 8, 9))) ::
Nil).toDF()
- testData.registerTempTable("TwoListTable")
+ testData.createOrReplaceTempView("TwoListTable")
sql(s"CREATE TEMPORARY FUNCTION testUDFTwoListList AS '${classOf[UDFTwoListList].getName}'")
checkAnswer(
@@ -295,7 +295,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
}
test("Hive UDFs with insufficient number of input arguments should trigger an analysis error") {
- Seq((1, 2)).toDF("a", "b").registerTempTable("testUDF")
+ Seq((1, 2)).toDF("a", "b").createOrReplaceTempView("testUDF")
{
// HiveSimpleUDF
@@ -352,7 +352,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
test("Hive UDF in group by") {
withTempTable("tab1") {
- Seq(Tuple1(1451400761)).toDF("test_date").registerTempTable("tab1")
+ Seq(Tuple1(1451400761)).toDF("test_date").createOrReplaceTempView("tab1")
sql(s"CREATE TEMPORARY FUNCTION testUDFToDate AS '${classOf[GenericUDFToDate].getName}'")
val count = sql("select testUDFToDate(cast(test_date as timestamp))" +
" from tab1 group by testUDFToDate(cast(test_date as timestamp))").count()
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index ac9a3930fd..81f3ea8a6e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -102,14 +102,14 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
test("SPARK-6835: udtf in lateral view") {
val df = Seq((1, 1)).toDF("c1", "c2")
- df.registerTempTable("table1")
+ df.createOrReplaceTempView("table1")
val query = sql("SELECT c1, v FROM table1 LATERAL VIEW stack(3, 1, c1 + 1, c1 + 2) d AS v")
checkAnswer(query, Row(1, 1) :: Row(1, 2) :: Row(1, 3) :: Nil)
}
test("SPARK-13651: generator outputs shouldn't be resolved from its child's output") {
withTempTable("src") {
- Seq(("id1", "value1")).toDF("key", "value").registerTempTable("src")
+ Seq(("id1", "value1")).toDF("key", "value").createOrReplaceTempView("src")
val query =
sql("SELECT genoutput.* FROM src " +
"LATERAL VIEW explode(map('key1', 100, 'key2', 200)) genoutput AS key, value")
@@ -135,8 +135,8 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
Order(1, "Atlas", "MTB", 434, "2015-01-07", "John D", "Pacifica", "CA", 20151),
Order(11, "Swift", "YFlikr", 137, "2015-01-23", "John D", "Hayward", "CA", 20151))
- orders.toDF.registerTempTable("orders1")
- orderUpdates.toDF.registerTempTable("orderupdates1")
+ orders.toDF.createOrReplaceTempView("orders1")
+ orderUpdates.toDF.createOrReplaceTempView("orderupdates1")
sql(
"""CREATE TABLE orders(
@@ -305,7 +305,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
test("SPARK-5371: union with null and sum") {
val df = Seq((1, 1)).toDF("c1", "c2")
- df.registerTempTable("table1")
+ df.createOrReplaceTempView("table1")
val query = sql(
"""
@@ -329,7 +329,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
test("CTAS with WITH clause") {
val df = Seq((1, 1)).toDF("c1", "c2")
- df.registerTempTable("table1")
+ df.createOrReplaceTempView("table1")
sql(
"""
@@ -346,7 +346,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
}
test("explode nested Field") {
- Seq(NestedArray1(NestedArray2(Seq(1, 2, 3)))).toDF.registerTempTable("nestedArray")
+ Seq(NestedArray1(NestedArray2(Seq(1, 2, 3)))).toDF.createOrReplaceTempView("nestedArray")
checkAnswer(
sql("SELECT ints FROM nestedArray LATERAL VIEW explode(a.b) a AS ints"),
Row(1) :: Row(2) :: Row(3) :: Nil)
@@ -543,7 +543,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
}
test("specifying the column list for CTAS") {
- Seq((1, "111111"), (2, "222222")).toDF("key", "value").registerTempTable("mytable1")
+ Seq((1, "111111"), (2, "222222")).toDF("key", "value").createOrReplaceTempView("mytable1")
sql("create table gen__tmp(a int, b string) as select key, value from mytable1")
checkAnswer(
@@ -598,7 +598,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
test("double nested data") {
sparkContext.parallelize(Nested1(Nested2(Nested3(1))) :: Nil)
- .toDF().registerTempTable("nested")
+ .toDF().createOrReplaceTempView("nested")
checkAnswer(
sql("SELECT f1.f2.f3 FROM nested"),
Row(1))
@@ -682,7 +682,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
test("SPARK-4963 DataFrame sample on mutable row return wrong result") {
sql("SELECT * FROM src WHERE key % 2 = 0")
.sample(withReplacement = false, fraction = 0.3)
- .registerTempTable("sampled")
+ .createOrReplaceTempView("sampled")
(1 to 10).foreach { i =>
checkAnswer(
sql("SELECT * FROM sampled WHERE key % 2 = 1"),
@@ -707,7 +707,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
val rowRdd = sparkContext.parallelize(row :: Nil)
- hiveContext.createDataFrame(rowRdd, schema).registerTempTable("testTable")
+ hiveContext.createDataFrame(rowRdd, schema).createOrReplaceTempView("testTable")
sql(
"""CREATE TABLE nullValuesInInnerComplexTypes
@@ -733,14 +733,14 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
test("SPARK-4296 Grouping field with Hive UDF as sub expression") {
val rdd = sparkContext.makeRDD( """{"a": "str", "b":"1", "c":"1970-01-01 00:00:00"}""" :: Nil)
- read.json(rdd).registerTempTable("data")
+ read.json(rdd).createOrReplaceTempView("data")
checkAnswer(
sql("SELECT concat(a, '-', b), year(c) FROM data GROUP BY concat(a, '-', b), year(c)"),
Row("str-1", 1970))
dropTempTable("data")
- read.json(rdd).registerTempTable("data")
+ read.json(rdd).createOrReplaceTempView("data")
checkAnswer(sql("SELECT year(c) + 1 FROM data GROUP BY year(c) + 1"), Row(1971))
dropTempTable("data")
@@ -748,14 +748,14 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
test("resolve udtf in projection #1") {
val rdd = sparkContext.makeRDD((1 to 5).map(i => s"""{"a":[$i, ${i + 1}]}"""))
- read.json(rdd).registerTempTable("data")
+ read.json(rdd).createOrReplaceTempView("data")
val df = sql("SELECT explode(a) AS val FROM data")
val col = df("val")
}
test("resolve udtf in projection #2") {
val rdd = sparkContext.makeRDD((1 to 2).map(i => s"""{"a":[$i, ${i + 1}]}"""))
- read.json(rdd).registerTempTable("data")
+ read.json(rdd).createOrReplaceTempView("data")
checkAnswer(sql("SELECT explode(map(1, 1)) FROM data LIMIT 1"), Row(1, 1) :: Nil)
checkAnswer(sql("SELECT explode(map(1, 1)) as (k1, k2) FROM data LIMIT 1"), Row(1, 1) :: Nil)
intercept[AnalysisException] {
@@ -770,7 +770,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
// TGF with non-TGF in project is allowed in Spark SQL, but not in Hive
test("TGF with non-TGF in projection") {
val rdd = sparkContext.makeRDD( """{"a": "1", "b":"1"}""" :: Nil)
- read.json(rdd).registerTempTable("data")
+ read.json(rdd).createOrReplaceTempView("data")
checkAnswer(
sql("SELECT explode(map(a, b)) as (k1, k2), a, b FROM data"),
Row("1", "1", "1", "1") :: Nil)
@@ -784,7 +784,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
// PreInsertionCasts will actually start to work before ImplicitGenerate and then
// generates an invalid query plan.
val rdd = sparkContext.makeRDD((1 to 5).map(i => s"""{"a":[$i, ${i + 1}]}"""))
- read.json(rdd).registerTempTable("data")
+ read.json(rdd).createOrReplaceTempView("data")
val originalConf = sessionState.convertCTAS
setConf(HiveUtils.CONVERT_CTAS, false)
@@ -824,7 +824,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
Seq.empty[(java.math.BigDecimal, java.math.BigDecimal)]
.toDF("d1", "d2")
.select($"d1".cast(DecimalType(10, 5)).as("d"))
- .registerTempTable("dn")
+ .createOrReplaceTempView("dn")
sql("select d from dn union all select d * 2 from dn")
.queryExecution.analyzed
@@ -832,27 +832,27 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
test("Star Expansion - script transform") {
val data = (1 to 100000).map { i => (i, i, i) }
- data.toDF("d1", "d2", "d3").registerTempTable("script_trans")
+ data.toDF("d1", "d2", "d3").createOrReplaceTempView("script_trans")
assert(100000 === sql("SELECT TRANSFORM (*) USING 'cat' FROM script_trans").count())
}
test("test script transform for stdout") {
val data = (1 to 100000).map { i => (i, i, i) }
- data.toDF("d1", "d2", "d3").registerTempTable("script_trans")
+ data.toDF("d1", "d2", "d3").createOrReplaceTempView("script_trans")
assert(100000 ===
sql("SELECT TRANSFORM (d1, d2, d3) USING 'cat' AS (a,b,c) FROM script_trans").count())
}
test("test script transform for stderr") {
val data = (1 to 100000).map { i => (i, i, i) }
- data.toDF("d1", "d2", "d3").registerTempTable("script_trans")
+ data.toDF("d1", "d2", "d3").createOrReplaceTempView("script_trans")
assert(0 ===
sql("SELECT TRANSFORM (d1, d2, d3) USING 'cat 1>&2' AS (a,b,c) FROM script_trans").count())
}
test("test script transform data type") {
val data = (1 to 5).map { i => (i, i) }
- data.toDF("key", "value").registerTempTable("test")
+ data.toDF("key", "value").createOrReplaceTempView("test")
checkAnswer(
sql("""FROM
|(FROM test SELECT TRANSFORM(key, value) USING 'cat' AS (`thing1` int, thing2 string)) t
@@ -864,7 +864,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
withTempTable("data") {
spark.range(1, 5)
.select(array($"id", $"id" + 1).as("a"), $"id".as("b"), (lit(10) - $"id").as("c"))
- .registerTempTable("data")
+ .createOrReplaceTempView("data")
// case 1: missing sort columns are resolvable if join is true
checkAnswer(
@@ -888,7 +888,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
}
test("test case key when") {
- (1 to 5).map(i => (i, i.toString)).toDF("k", "v").registerTempTable("t")
+ (1 to 5).map(i => (i, i.toString)).toDF("k", "v").createOrReplaceTempView("t")
checkAnswer(
sql("SELECT CASE k WHEN 2 THEN 22 WHEN 4 THEN 44 ELSE 0 END, v FROM t"),
Row(0, "1") :: Row(22, "2") :: Row(0, "3") :: Row(44, "4") :: Row(0, "5") :: Nil)
@@ -897,7 +897,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
test("SPARK-7269 Check analysis failed in case in-sensitive") {
Seq(1, 2, 3).map { i =>
(i.toString, i.toString)
- }.toDF("key", "value").registerTempTable("df_analysis")
+ }.toDF("key", "value").createOrReplaceTempView("df_analysis")
sql("SELECT kEy from df_analysis group by key").collect()
sql("SELECT kEy+3 from df_analysis group by key+3").collect()
sql("SELECT kEy+3, a.kEy, A.kEy from df_analysis A group by key").collect()
@@ -1031,7 +1031,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
test("SPARK-8588 HiveTypeCoercion.inConversion fires too early") {
val df =
createDataFrame(Seq((1, "2014-01-01"), (2, "2015-01-01"), (3, "2016-01-01")))
- df.toDF("id", "datef").registerTempTable("test_SPARK8588")
+ df.toDF("id", "datef").createOrReplaceTempView("test_SPARK8588")
checkAnswer(
sql(
"""
@@ -1046,7 +1046,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
test("SPARK-9371: fix the support for special chars in column names for hive context") {
read.json(sparkContext.makeRDD(
"""{"a": {"c.b": 1}, "b.$q": [{"a@!.q": 1}], "q.w": {"w.i&": [1]}}""" :: Nil))
- .registerTempTable("t")
+ .createOrReplaceTempView("t")
checkAnswer(sql("SELECT a.`c.b`, `b.$q`[0].`a@!.q`, `q.w`.`w.i&`[0] FROM t"), Row(1, 1, 1))
}
@@ -1125,7 +1125,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
spark
.range(5)
.selectExpr("id AS a", "id AS b")
- .registerTempTable("test")
+ .createOrReplaceTempView("test")
checkAnswer(
sql(
@@ -1143,7 +1143,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
spark
.range(5)
.selectExpr("id AS a", "id AS b")
- .registerTempTable("test")
+ .createOrReplaceTempView("test")
val df = sql(
"""FROM test
@@ -1162,7 +1162,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
test("SPARK-10741: Sort on Aggregate using parquet") {
withTable("test10741") {
withTempTable("src") {
- Seq("a" -> 5, "a" -> 9, "b" -> 6).toDF().registerTempTable("src")
+ Seq("a" -> 5, "a" -> 9, "b" -> 6).toDF().createOrReplaceTempView("src")
sql("CREATE TABLE test10741(c1 STRING, c2 INT) STORED AS PARQUET AS SELECT * FROM src")
}
@@ -1374,7 +1374,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
withTempTable("t1") {
spark.range(10)
.select(array($"id", $"id" + 1).as("arr"), $"id")
- .registerTempTable("source")
+ .createOrReplaceTempView("source")
withTable("dest1", "dest2") {
sql("CREATE TABLE dest1 (i INT)")
sql("CREATE TABLE dest2 (i INT)")
@@ -1407,7 +1407,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
withTempTable("t1", "t2") {
val path = dir.getCanonicalPath
val ds = spark.range(10)
- ds.registerTempTable("t1")
+ ds.createOrReplaceTempView("t1")
sql(
s"""CREATE TEMPORARY TABLE t2
@@ -1431,7 +1431,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
"shouldn always be used together with PATH data source option"
) {
withTempTable("t") {
- spark.range(10).registerTempTable("t")
+ spark.range(10).createOrReplaceTempView("t")
val message = intercept[IllegalArgumentException] {
sql(
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLWindowFunctionSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLWindowFunctionSuite.scala
index 4d284e1042..47ceefb88e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLWindowFunctionSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLWindowFunctionSuite.scala
@@ -40,7 +40,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
WindowData(5, "c", 9),
WindowData(6, "c", 10)
)
- sparkContext.parallelize(data).toDF().registerTempTable("windowData")
+ sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
checkAnswer(
sql(
@@ -112,7 +112,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
WindowData(5, "c", 9),
WindowData(6, "c", 10)
)
- sparkContext.parallelize(data).toDF().registerTempTable("windowData")
+ sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
checkAnswer(
sql(
@@ -139,7 +139,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
WindowData(5, "c", 9),
WindowData(6, "c", 10)
)
- sparkContext.parallelize(data).toDF().registerTempTable("windowData")
+ sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
checkAnswer(
sql(
@@ -182,7 +182,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
WindowData(5, "c", 9),
WindowData(6, "c", 10)
)
- sparkContext.parallelize(data).toDF().registerTempTable("windowData")
+ sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
val e = intercept[AnalysisException] {
sql(
@@ -203,7 +203,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
WindowData(5, "c", 9),
WindowData(6, "c", 10)
)
- sparkContext.parallelize(data).toDF().registerTempTable("windowData")
+ sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
checkAnswer(
sql(
@@ -232,7 +232,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
WindowData(5, "c", 9),
WindowData(6, "c", 11)
)
- sparkContext.parallelize(data).toDF().registerTempTable("windowData")
+ sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
checkAnswer(
sql("select month, product, sum(product + 1) over() from windowData order by area"),
@@ -301,7 +301,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
WindowData(5, "c", 9),
WindowData(6, "c", 11)
)
- sparkContext.parallelize(data).toDF().registerTempTable("windowData")
+ sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
checkAnswer(
sql(
@@ -322,7 +322,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
test("window function: multiple window expressions in a single expression") {
val nums = sparkContext.parallelize(1 to 10).map(x => (x, x % 2)).toDF("x", "y")
- nums.registerTempTable("nums")
+ nums.createOrReplaceTempView("nums")
val expected =
Row(1, 1, 1, 55, 1, 57) ::
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcPartitionDiscoverySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcPartitionDiscoverySuite.scala
index 6161412a49..fed0d11e9d 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcPartitionDiscoverySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcPartitionDiscoverySuite.scala
@@ -90,7 +90,7 @@ class OrcPartitionDiscoverySuite extends QueryTest with TestHiveSingleton with B
makePartitionDir(base, defaultPartitionName, "pi" -> pi, "ps" -> ps))
}
- read.orc(base.getCanonicalPath).registerTempTable("t")
+ read.orc(base.getCanonicalPath).createOrReplaceTempView("t")
withTempTable("t") {
checkAnswer(
@@ -137,7 +137,7 @@ class OrcPartitionDiscoverySuite extends QueryTest with TestHiveSingleton with B
makePartitionDir(base, defaultPartitionName, "pi" -> pi, "ps" -> ps))
}
- read.orc(base.getCanonicalPath).registerTempTable("t")
+ read.orc(base.getCanonicalPath).createOrReplaceTempView("t")
withTempTable("t") {
checkAnswer(
@@ -189,7 +189,7 @@ class OrcPartitionDiscoverySuite extends QueryTest with TestHiveSingleton with B
read
.option(ConfVars.DEFAULTPARTITIONNAME.varname, defaultPartitionName)
.orc(base.getCanonicalPath)
- .registerTempTable("t")
+ .createOrReplaceTempView("t")
withTempTable("t") {
checkAnswer(
@@ -231,7 +231,7 @@ class OrcPartitionDiscoverySuite extends QueryTest with TestHiveSingleton with B
read
.option(ConfVars.DEFAULTPARTITIONNAME.varname, defaultPartitionName)
.orc(base.getCanonicalPath)
- .registerTempTable("t")
+ .createOrReplaceTempView("t")
withTempTable("t") {
checkAnswer(
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
index 9a0885822b..f83b3a3de2 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
@@ -98,7 +98,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
test("Creating case class RDD table") {
val data = (1 to 100).map(i => (i, s"val_$i"))
- sparkContext.parallelize(data).toDF().registerTempTable("t")
+ sparkContext.parallelize(data).toDF().createOrReplaceTempView("t")
withTempTable("t") {
checkAnswer(sql("SELECT * FROM t"), data.toDF().collect())
}
@@ -223,7 +223,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
test("appending") {
val data = (0 until 10).map(i => (i, i.toString))
- createDataFrame(data).toDF("c1", "c2").registerTempTable("tmp")
+ createDataFrame(data).toDF("c1", "c2").createOrReplaceTempView("tmp")
withOrcTable(data, "t") {
sql("INSERT INTO TABLE t SELECT * FROM tmp")
checkAnswer(table("t"), (data ++ data).map(Row.fromTuple))
@@ -233,7 +233,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
test("overwriting") {
val data = (0 until 10).map(i => (i, i.toString))
- createDataFrame(data).toDF("c1", "c2").registerTempTable("tmp")
+ createDataFrame(data).toDF("c1", "c2").createOrReplaceTempView("tmp")
withOrcTable(data, "t") {
sql("INSERT OVERWRITE TABLE t SELECT * FROM tmp")
checkAnswer(table("t"), data.map(Row.fromTuple))
@@ -324,7 +324,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
""".stripMargin)
val emptyDF = Seq.empty[(Int, String)].toDF("key", "value").coalesce(1)
- emptyDF.registerTempTable("empty")
+ emptyDF.createOrReplaceTempView("empty")
// This creates 1 empty ORC file with Hive ORC SerDe. We are using this trick because
// Spark SQL ORC data source always avoids write empty ORC files.
@@ -340,7 +340,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
assert(errorMessage.contains("Unable to infer schema for ORC"))
val singleRowDF = Seq((0, "foo")).toDF("key", "value").coalesce(1)
- singleRowDF.registerTempTable("single")
+ singleRowDF.createOrReplaceTempView("single")
spark.sql(
s"""INSERT INTO TABLE empty_orc
@@ -422,7 +422,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
""".stripMargin)
val singleRowDF = Seq((0, "foo")).toDF("key", "value").coalesce(1)
- singleRowDF.registerTempTable("single")
+ singleRowDF.createOrReplaceTempView("single")
spark.sql(
s"""INSERT INTO TABLE dummy_orc
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
index 96a7364437..6081d86f44 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
@@ -51,7 +51,7 @@ abstract class OrcSuite extends QueryTest with TestHiveSingleton with BeforeAndA
.makeRDD(1 to 10)
.map(i => OrcData(i, s"part-$i"))
.toDF()
- .registerTempTable(s"orc_temp_table")
+ .createOrReplaceTempView(s"orc_temp_table")
sql(
s"""CREATE EXTERNAL TABLE normal_orc(
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
index f52c6e48c5..7fe158c218 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
@@ -171,8 +171,9 @@ class ParquetMetastoreSuite extends ParquetPartitioningTest {
sql(s"ALTER TABLE partitioned_parquet_with_complextypes ADD PARTITION (p=$p)")
}
- (1 to 10).map(i => (i, s"str$i")).toDF("a", "b").registerTempTable("jt")
- (1 to 10).map(i => Tuple1(Seq(new Integer(i), null))).toDF("a").registerTempTable("jt_array")
+ (1 to 10).map(i => (i, s"str$i")).toDF("a", "b").createOrReplaceTempView("jt")
+ (1 to 10).map(i => Tuple1(Seq(new Integer(i), null))).toDF("a")
+ .createOrReplaceTempView("jt_array")
setConf(HiveUtils.CONVERT_METASTORE_PARQUET, true)
}
@@ -541,8 +542,8 @@ class ParquetMetastoreSuite extends ParquetPartitioningTest {
|STORED AS PARQUET
""".stripMargin)
- // Temp table to insert data into partitioned table
- Seq("foo", "bar").toDF("a").registerTempTable("test_temp")
+ // Temp view that is used to insert data into partitioned table
+ Seq("foo", "bar").toDF("a").createOrReplaceTempView("test_temp")
sql("INSERT INTO test_added_partitions PARTITION(b='0') SELECT a FROM test_temp")
checkAnswer(
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
index 78d2dc28d6..a3183f2977 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
@@ -91,7 +91,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
yield Row(s"val_$i", s"val_$i", s"val_$i", s"val_$i", 1, 1, 1, 1))
// Self-join
- df.registerTempTable("t")
+ df.createOrReplaceTempView("t")
withTempTable("t") {
checkAnswer(
sql(
@@ -337,7 +337,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
}
test("saveAsTable()/load() - non-partitioned table - ErrorIfExists") {
- Seq.empty[(Int, String)].toDF().registerTempTable("t")
+ Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
withTempTable("t") {
intercept[AnalysisException] {
@@ -347,7 +347,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
}
test("saveAsTable()/load() - non-partitioned table - Ignore") {
- Seq.empty[(Int, String)].toDF().registerTempTable("t")
+ Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
withTempTable("t") {
testDF.write.format(dataSourceName).mode(SaveMode.Ignore).saveAsTable("t")
@@ -459,7 +459,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
}
test("saveAsTable()/load() - partitioned table - ErrorIfExists") {
- Seq.empty[(Int, String)].toDF().registerTempTable("t")
+ Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
withTempTable("t") {
intercept[AnalysisException] {
@@ -474,7 +474,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
}
test("saveAsTable()/load() - partitioned table - Ignore") {
- Seq.empty[(Int, String)].toDF().registerTempTable("t")
+ Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
withTempTable("t") {
partitionedTestDF.write