author    Sean Zhong <seanzhong@databricks.com>    2016-05-12 15:51:53 +0800
committer Cheng Lian <lian@databricks.com>         2016-05-12 15:51:53 +0800
commit    33c6eb5218ce3c31cc9f632a67fd2c7057569683 (patch)
tree      b8c84c24107bf1ece596450ef3a3eec26df1f21d /sql
parent    5207a005cc86618907b8f467abc03eacef485ecd (diff)
[SPARK-15171][SQL] Deprecate registerTempTable and add dataset.createTempView
## What changes were proposed in this pull request?

Deprecates `registerTempTable` and adds `Dataset.createTempView` and `Dataset.createOrReplaceTempView`.

## How was this patch tested?

Unit tests.

Author: Sean Zhong <seanzhong@databricks.com>

Closes #12945 from clockfly/spark-15171.
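For context, a minimal sketch of the API before and after this change (illustrative only, not part of the original PR description; assumes Spark 2.0 with a local `SparkSession`):

```scala
import org.apache.spark.sql.{AnalysisException, SparkSession}

val spark = SparkSession.builder().master("local").appName("SPARK-15171-demo").getOrCreate()
val df = spark.range(10).toDF("id")

// Deprecated: registerTempTable silently replaced any existing temp table.
df.registerTempTable("people")

// New: createTempView refuses to clobber an existing name ...
try df.createTempView("people")
catch { case e: AnalysisException => assert(e.getMessage.contains("already exists")) }

// ... while createOrReplaceTempView keeps the old replace-on-conflict behavior.
df.createOrReplaceTempView("people")

// The view is scoped to this SparkSession and visible to SQL.
spark.sql("SELECT count(*) AS n FROM people").show()

// The matching catalog method is renamed dropTempTable -> dropTempView.
spark.catalog.dropTempView("people")
```

The non-replacing variant mirrors the CREATE [OR REPLACE] TEMPORARY VIEW split in SQL, which `CreateViewCommand` routes through the same `createTempView` catalog call below.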
Diffstat (limited to 'sql')
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/DistinctAggregationRewriter.scala | 2
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala | 2
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala | 2
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala | 2
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala | 26
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala | 2
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala | 30
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala | 4
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala | 17
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala | 8
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala | 2
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala | 3
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala | 2
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ddl.scala | 4
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala | 12
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala | 6
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala | 19
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/DataFrameTimeWindowingSuite.scala | 2
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala | 18
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/ListTablesSuite.scala | 2
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala | 6
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala | 4
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala | 2
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetReadBenchmark.scala | 2
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala | 2
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala | 2
-rw-r--r-- sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala | 2
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala | 6
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveDDLCommandSuite.scala | 2
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala | 2
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala | 2
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala | 2
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLWindowFunctionSuite.scala | 2
33 files changed, 123 insertions(+), 78 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/DistinctAggregationRewriter.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/DistinctAggregationRewriter.scala
index 2e30d83a60..063eff4f98 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/DistinctAggregationRewriter.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/DistinctAggregationRewriter.scala
@@ -35,7 +35,7 @@ import org.apache.spark.sql.types.IntegerType
* ("a", "ca1", "cb2", 5),
* ("b", "ca1", "cb1", 13))
* .toDF("key", "cat1", "cat2", "value")
- * data.registerTempTable("data")
+ * data.createOrReplaceTempView("data")
*
* val agg = data.groupBy($"key")
* .agg(
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index f53311c5c9..0fc4ab51de 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -315,7 +315,7 @@ class SessionCatalog(
/**
* Create a temporary table.
*/
- def createTempTable(
+ def createTempView(
name: String,
tableDefinition: LogicalPlan,
overrideIfExists: Boolean): Unit = synchronized {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
index b1fcf011f4..3acb261800 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
@@ -31,7 +31,7 @@ trait AnalysisTest extends PlanTest {
private def makeAnalyzer(caseSensitive: Boolean): Analyzer = {
val conf = new SimpleCatalystConf(caseSensitive)
val catalog = new SessionCatalog(new InMemoryCatalog, EmptyFunctionRegistry, conf)
- catalog.createTempTable("TaBlE", TestRelations.testRelation, overrideIfExists = true)
+ catalog.createTempView("TaBlE", TestRelations.testRelation, overrideIfExists = true)
new Analyzer(catalog, conf) {
override val extendedResolutionRules = EliminateSubqueryAliases :: Nil
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala
index b3b1f5b920..66d9b4c8e3 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala
@@ -52,7 +52,7 @@ class DecimalPrecisionSuite extends PlanTest with BeforeAndAfter {
private val b: Expression = UnresolvedAttribute("b")
before {
- catalog.createTempTable("table", relation, overrideIfExists = true)
+ catalog.createTempView("table", relation, overrideIfExists = true)
}
private def checkType(expression: Expression, expectedType: DataType): Unit = {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
index 80422c20a6..726b7a1e03 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
@@ -199,17 +199,17 @@ class SessionCatalogSuite extends SparkFunSuite {
val catalog = new SessionCatalog(newBasicCatalog())
val tempTable1 = Range(1, 10, 1, 10, Seq())
val tempTable2 = Range(1, 20, 2, 10, Seq())
- catalog.createTempTable("tbl1", tempTable1, overrideIfExists = false)
- catalog.createTempTable("tbl2", tempTable2, overrideIfExists = false)
+ catalog.createTempView("tbl1", tempTable1, overrideIfExists = false)
+ catalog.createTempView("tbl2", tempTable2, overrideIfExists = false)
assert(catalog.getTempTable("tbl1") == Option(tempTable1))
assert(catalog.getTempTable("tbl2") == Option(tempTable2))
assert(catalog.getTempTable("tbl3").isEmpty)
// Temporary table already exists
intercept[TempTableAlreadyExistsException] {
- catalog.createTempTable("tbl1", tempTable1, overrideIfExists = false)
+ catalog.createTempView("tbl1", tempTable1, overrideIfExists = false)
}
// Temporary table already exists but we override it
- catalog.createTempTable("tbl1", tempTable2, overrideIfExists = true)
+ catalog.createTempView("tbl1", tempTable2, overrideIfExists = true)
assert(catalog.getTempTable("tbl1") == Option(tempTable2))
}
@@ -244,7 +244,7 @@ class SessionCatalogSuite extends SparkFunSuite {
val externalCatalog = newBasicCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
val tempTable = Range(1, 10, 2, 10, Seq())
- sessionCatalog.createTempTable("tbl1", tempTable, overrideIfExists = false)
+ sessionCatalog.createTempView("tbl1", tempTable, overrideIfExists = false)
sessionCatalog.setCurrentDatabase("db2")
assert(sessionCatalog.getTempTable("tbl1") == Some(tempTable))
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
@@ -256,7 +256,7 @@ class SessionCatalogSuite extends SparkFunSuite {
sessionCatalog.dropTable(TableIdentifier("tbl1"), ignoreIfNotExists = false)
assert(externalCatalog.listTables("db2").toSet == Set("tbl2"))
// If database is specified, temp tables are never dropped
- sessionCatalog.createTempTable("tbl1", tempTable, overrideIfExists = false)
+ sessionCatalog.createTempView("tbl1", tempTable, overrideIfExists = false)
sessionCatalog.createTable(newTable("tbl1", "db2"), ignoreIfExists = false)
sessionCatalog.dropTable(TableIdentifier("tbl1", Some("db2")), ignoreIfNotExists = false)
assert(sessionCatalog.getTempTable("tbl1") == Some(tempTable))
@@ -305,7 +305,7 @@ class SessionCatalogSuite extends SparkFunSuite {
val externalCatalog = newBasicCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
val tempTable = Range(1, 10, 2, 10, Seq())
- sessionCatalog.createTempTable("tbl1", tempTable, overrideIfExists = false)
+ sessionCatalog.createTempView("tbl1", tempTable, overrideIfExists = false)
sessionCatalog.setCurrentDatabase("db2")
assert(sessionCatalog.getTempTable("tbl1") == Option(tempTable))
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
@@ -385,7 +385,7 @@ class SessionCatalogSuite extends SparkFunSuite {
val sessionCatalog = new SessionCatalog(externalCatalog)
val tempTable1 = Range(1, 10, 1, 10, Seq())
val metastoreTable1 = externalCatalog.getTable("db2", "tbl1")
- sessionCatalog.createTempTable("tbl1", tempTable1, overrideIfExists = false)
+ sessionCatalog.createTempView("tbl1", tempTable1, overrideIfExists = false)
sessionCatalog.setCurrentDatabase("db2")
// If we explicitly specify the database, we'll look up the relation in that database
assert(sessionCatalog.lookupRelation(TableIdentifier("tbl1", Some("db2")))
@@ -423,7 +423,7 @@ class SessionCatalogSuite extends SparkFunSuite {
assert(!catalog.tableExists(TableIdentifier("tbl2", Some("db1"))))
// If database is explicitly specified, do not check temporary tables
val tempTable = Range(1, 10, 1, 10, Seq())
- catalog.createTempTable("tbl3", tempTable, overrideIfExists = false)
+ catalog.createTempView("tbl3", tempTable, overrideIfExists = false)
assert(!catalog.tableExists(TableIdentifier("tbl3", Some("db2"))))
// If database is not explicitly specified, check the current database
catalog.setCurrentDatabase("db2")
@@ -435,8 +435,8 @@ class SessionCatalogSuite extends SparkFunSuite {
test("list tables without pattern") {
val catalog = new SessionCatalog(newBasicCatalog())
val tempTable = Range(1, 10, 2, 10, Seq())
- catalog.createTempTable("tbl1", tempTable, overrideIfExists = false)
- catalog.createTempTable("tbl4", tempTable, overrideIfExists = false)
+ catalog.createTempView("tbl1", tempTable, overrideIfExists = false)
+ catalog.createTempView("tbl4", tempTable, overrideIfExists = false)
assert(catalog.listTables("db1").toSet ==
Set(TableIdentifier("tbl1"), TableIdentifier("tbl4")))
assert(catalog.listTables("db2").toSet ==
@@ -452,8 +452,8 @@ class SessionCatalogSuite extends SparkFunSuite {
test("list tables with pattern") {
val catalog = new SessionCatalog(newBasicCatalog())
val tempTable = Range(1, 10, 2, 10, Seq())
- catalog.createTempTable("tbl1", tempTable, overrideIfExists = false)
- catalog.createTempTable("tbl4", tempTable, overrideIfExists = false)
+ catalog.createTempView("tbl1", tempTable, overrideIfExists = false)
+ catalog.createTempView("tbl4", tempTable, overrideIfExists = false)
assert(catalog.listTables("db1", "*").toSet == catalog.listTables("db1").toSet)
assert(catalog.listTables("db2", "*").toSet == catalog.listTables("db2").toSet)
assert(catalog.listTables("db2", "tbl*").toSet ==
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
index a9e8329c4b..6f5fb69ea3 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
@@ -321,7 +321,7 @@ final class DataFrameWriter private[sql](df: DataFrame) {
val sink = new MemorySink(df.schema)
val resultDf = Dataset.ofRows(df.sparkSession, new MemoryPlan(sink))
- resultDf.registerTempTable(queryName)
+ resultDf.createOrReplaceTempView(queryName)
val continuousQuery = df.sparkSession.sessionState.continuousQueryManager.startQuery(
queryName,
checkpointLocation,
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index 45a69cacd1..210ad956ff 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -2303,13 +2303,39 @@ class Dataset[T] private[sql](
/**
* Registers this [[Dataset]] as a temporary table using the given name. The lifetime of this
- * temporary table is tied to the [[SQLContext]] that was used to create this Dataset.
+ * temporary table is tied to the [[SparkSession]] that was used to create this Dataset.
*
* @group basic
* @since 1.6.0
*/
+ @deprecated("Use createOrReplaceTempView(viewName) instead.", "2.0.0")
def registerTempTable(tableName: String): Unit = {
- sparkSession.registerTable(toDF(), tableName)
+ createOrReplaceTempView(tableName)
+ }
+
+ /**
+ * Creates a temporary view using the given name. The lifetime of this
+ * temporary view is tied to the [[SparkSession]] that was used to create this Dataset.
+ *
+ * @throws AnalysisException if the view name already exists
+ *
+ * @group basic
+ * @since 2.0.0
+ */
+ @throws[AnalysisException]
+ def createTempView(viewName: String): Unit = {
+ sparkSession.createTempView(viewName, toDF(), replaceIfExists = false)
+ }
+
+ /**
+ * Creates a temporary view using the given name. The lifetime of this
+ * temporary view is tied to the [[SparkSession]] that was used to create this Dataset.
+ *
+ * @group basic
+ * @since 2.0.0
+ */
+ def createOrReplaceTempView(viewName: String): Unit = {
+ sparkSession.createTempView(viewName, toDF(), replaceIfExists = true)
}
/**
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index 168ac7e04b..c64e284635 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -597,7 +597,7 @@ class SQLContext private[sql](
* only during the lifetime of this instance of SQLContext.
*/
private[sql] def registerDataFrameAsTable(df: DataFrame, tableName: String): Unit = {
- sparkSession.registerTable(df, tableName)
+ sparkSession.createTempView(tableName, df, replaceIfExists = true)
}
/**
@@ -609,7 +609,7 @@ class SQLContext private[sql](
* @since 1.3.0
*/
def dropTempTable(tableName: String): Unit = {
- sparkSession.catalog.dropTempTable(tableName)
+ sparkSession.catalog.dropTempView(tableName)
}
/**
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index c7fa8f71bc..02c9dc03ae 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -284,7 +284,7 @@ class SparkSession private(
* // |-- name: string (nullable = false)
* // |-- age: integer (nullable = true)
*
- * dataFrame.registerTempTable("people")
+ * dataFrame.createOrReplaceTempView("people")
* sparkSession.sql("select name from people").collect.foreach(println)
* }}}
*
@@ -515,17 +515,16 @@ class SparkSession private(
}
/**
- * Registers the given [[DataFrame]] as a temporary table in the catalog.
- * Temporary tables exist only during the lifetime of this instance of [[SparkSession]].
+ * Creates a temporary view with a DataFrame. The lifetime of this temporary view is tied to
+ * this [[SparkSession]].
*/
- protected[sql] def registerTable(df: DataFrame, tableName: String): Unit = {
- sessionState.catalog.createTempTable(
- sessionState.sqlParser.parseTableIdentifier(tableName).table,
- df.logicalPlan,
- overrideIfExists = true)
+ protected[sql] def createTempView(
+ viewName: String, df: DataFrame, replaceIfExists: Boolean) = {
+ sessionState.catalog.createTempView(
+ sessionState.sqlParser.parseTableIdentifier(viewName).table,
+ df.logicalPlan, replaceIfExists)
}
-
/* ----------------- *
| Everything else |
* ----------------- */
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
index 7a815c1f99..49c0742761 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
@@ -175,13 +175,13 @@ abstract class Catalog {
options: Map[String, String]): DataFrame
/**
- * Drops the temporary table with the given table name in the catalog.
- * If the table has been cached before, then it will also be uncached.
+ * Drops the temporary view with the given view name in the catalog.
+ * If the view has been cached before, then it will also be uncached.
*
- * @param tableName the name of the table to be dropped.
+ * @param viewName the name of the view to be dropped.
* @since 2.0.0
*/
- def dropTempTable(tableName: String): Unit
+ def dropTempView(viewName: String): Unit
/**
* Returns true if the table is currently cached in-memory.
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index b6e074bf59..3045f3af36 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -753,7 +753,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
if (temp) {
throw new ParseException(
"CREATE TEMPORARY TABLE is not supported yet. " +
- "Please use registerTempTable as an alternative.", ctx)
+ "Please use CREATE TEMPORARY VIEW as an alternative.", ctx)
}
if (ctx.skewSpec != null) {
throw operationNotAllowed("CREATE TABLE ... SKEWED BY", ctx)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala
index f05401b02b..31dc016a01 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala
@@ -30,7 +30,8 @@ case class CacheTableCommand(
override def run(sparkSession: SparkSession): Seq[Row] = {
plan.foreach { logicalPlan =>
- sparkSession.registerTable(Dataset.ofRows(sparkSession, logicalPlan), tableName)
+ sparkSession.createTempView(
+ tableName, Dataset.ofRows(sparkSession, logicalPlan), replaceIfExists = true)
}
sparkSession.catalog.cacheTable(tableName)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
index 70ce5c8429..075849afde 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
@@ -136,7 +136,7 @@ case class CreateViewCommand(
}
}
- catalog.createTempTable(table.table, logicalPlan, replace)
+ catalog.createTempView(table.table, logicalPlan, replace)
}
/**
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ddl.scala
index 3863be5768..68238dbb46 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ddl.scala
@@ -82,7 +82,7 @@ case class CreateTempTableUsing(
userSpecifiedSchema = userSpecifiedSchema,
className = provider,
options = options)
- sparkSession.sessionState.catalog.createTempTable(
+ sparkSession.sessionState.catalog.createTempView(
tableIdent.table,
Dataset.ofRows(sparkSession, LogicalRelation(dataSource.resolveRelation())).logicalPlan,
overrideIfExists = true)
@@ -113,7 +113,7 @@ case class CreateTempTableUsingAsSelect(
bucketSpec = None,
options = options)
val result = dataSource.write(mode, df)
- sparkSession.sessionState.catalog.createTempTable(
+ sparkSession.sessionState.catalog.createTempView(
tableIdent.table,
Dataset.ofRows(sparkSession, LogicalRelation(result)).logicalPlan,
overrideIfExists = true)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
index fdfb188b38..473e827f4d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
@@ -283,16 +283,16 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
}
/**
- * Drops the temporary table with the given table name in the catalog.
- * If the table has been cached/persisted before, it's also unpersisted.
+ * Drops the temporary view with the given view name in the catalog.
+ * If the view has been cached/persisted before, it's also unpersisted.
*
- * @param tableName the name of the table to be unregistered.
+ * @param viewName the name of the view to be dropped.
* @group ddl_ops
* @since 2.0.0
*/
- override def dropTempTable(tableName: String): Unit = {
- sparkSession.cacheManager.tryUncacheQuery(sparkSession.table(tableName))
- sessionCatalog.dropTable(TableIdentifier(tableName), ignoreIfNotExists = true)
+ override def dropTempView(viewName: String): Unit = {
+ sparkSession.cacheManager.tryUncacheQuery(sparkSession.table(viewName))
+ sessionCatalog.dropTable(TableIdentifier(viewName), ignoreIfNotExists = true)
}
/**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
index 800316cde7..6d8de80a11 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
@@ -288,7 +288,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
test("Drops temporary table") {
testData.select('key).registerTempTable("t1")
spark.table("t1")
- spark.catalog.dropTempTable("t1")
+ spark.catalog.dropTempView("t1")
intercept[AnalysisException](spark.table("t1"))
}
@@ -300,7 +300,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
assert(spark.catalog.isCached("t1"))
assert(spark.catalog.isCached("t2"))
- spark.catalog.dropTempTable("t1")
+ spark.catalog.dropTempView("t1")
intercept[AnalysisException](spark.table("t1"))
assert(!spark.catalog.isCached("t2"))
}
@@ -382,7 +382,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
sql("SELECT key, count(*) FROM orderedTable GROUP BY key ORDER BY key"),
sql("SELECT key, count(*) FROM testData3x GROUP BY key ORDER BY key").collect())
spark.catalog.uncacheTable("orderedTable")
- spark.catalog.dropTempTable("orderedTable")
+ spark.catalog.dropTempView("orderedTable")
// Set up two tables distributed in the same way. Try this with the data distributed into
// different number of partitions.
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index f77403c13e..f573abf859 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -994,17 +994,18 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
// pass case: parquet table (HadoopFsRelation)
df.write.mode(SaveMode.Overwrite).parquet(tempParquetFile.getCanonicalPath)
val pdf = spark.read.parquet(tempParquetFile.getCanonicalPath)
- pdf.registerTempTable("parquet_base")
+ pdf.createOrReplaceTempView("parquet_base")
+
insertion.write.insertInto("parquet_base")
// pass case: json table (InsertableRelation)
df.write.mode(SaveMode.Overwrite).json(tempJsonFile.getCanonicalPath)
val jdf = spark.read.json(tempJsonFile.getCanonicalPath)
- jdf.registerTempTable("json_base")
+ jdf.createOrReplaceTempView("json_base")
insertion.write.mode(SaveMode.Overwrite).insertInto("json_base")
// error cases: insert into an RDD
- df.registerTempTable("rdd_base")
+ df.createOrReplaceTempView("rdd_base")
val e1 = intercept[AnalysisException] {
insertion.write.insertInto("rdd_base")
}
@@ -1012,14 +1013,14 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
// error case: insert into a logical plan that is not a LeafNode
val indirectDS = pdf.select("_1").filter($"_1" > 5)
- indirectDS.registerTempTable("indirect_ds")
+ indirectDS.createOrReplaceTempView("indirect_ds")
val e2 = intercept[AnalysisException] {
insertion.write.insertInto("indirect_ds")
}
assert(e2.getMessage.contains("Inserting into an RDD-based table is not allowed."))
// error case: insert into an OneRowRelation
- Dataset.ofRows(spark, OneRowRelation).registerTempTable("one_row")
+ Dataset.ofRows(spark, OneRowRelation).createOrReplaceTempView("one_row")
val e3 = intercept[AnalysisException] {
insertion.write.insertInto("one_row")
}
@@ -1443,13 +1444,13 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
test("SPARK-12982: Add table name validation in temp table registration") {
val df = Seq("foo", "bar").map(Tuple1.apply).toDF("col")
// invalid table name test as below
- intercept[AnalysisException](df.registerTempTable("t~"))
+ intercept[AnalysisException](df.createOrReplaceTempView("t~"))
// valid table name test as below
- df.registerTempTable("table1")
+ df.createOrReplaceTempView("table1")
// another invalid table name test as below
- intercept[AnalysisException](df.registerTempTable("#$@sum"))
+ intercept[AnalysisException](df.createOrReplaceTempView("#$@sum"))
// another invalid table name test as below
- intercept[AnalysisException](df.registerTempTable("table!#"))
+ intercept[AnalysisException](df.createOrReplaceTempView("table!#"))
}
test("assertAnalyzed shouldn't replace original stack trace") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameTimeWindowingSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameTimeWindowingSuite.scala
index a957d5ba25..4ee2006421 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameTimeWindowingSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameTimeWindowingSuite.scala
@@ -249,7 +249,7 @@ class DataFrameTimeWindowingSuite extends QueryTest with SharedSQLContext with B
try {
f(tableName)
} finally {
- spark.catalog.dropTempTable(tableName)
+ spark.catalog.dropTempView(tableName)
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
index 3c8c862c22..0784041f34 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
@@ -22,6 +22,8 @@ import java.sql.{Date, Timestamp}
import scala.language.postfixOps
+import org.scalatest.words.MatcherWords.be
+
import org.apache.spark.sql.catalyst.encoders.{OuterScopes, RowEncoder}
import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.functions._
@@ -674,6 +676,21 @@ class DatasetSuite extends QueryTest with SharedSQLContext {
}.getMessage
assert(message.contains("The 0th field of input row cannot be null"))
}
+
+ test("createTempView") {
+ val dataset = Seq(1, 2, 3).toDS()
+ dataset.createOrReplaceTempView("tempView")
+
+ // Overrides the existing temporary view with the same name.
+ // No exception should be thrown here.
+ dataset.createOrReplaceTempView("tempView")
+
+ // Throws AnalysisException if temp view with same name already exists
+ val e = intercept[AnalysisException](
+ dataset.createTempView("tempView"))
+ assert(e.message.contains("already exists"))
+ dataset.sparkSession.catalog.dropTempView("tempView")
+ }
}
case class OtherTuple(_1: String, _2: Int)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ListTablesSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ListTablesSuite.scala
index c88dfe5f24..1c6e6cc15d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ListTablesSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ListTablesSuite.scala
@@ -83,7 +83,7 @@ class ListTablesSuite extends QueryTest with BeforeAndAfter with SharedSQLContex
checkAnswer(
spark.wrapped.tables().filter("tableName = 'tables'").select("tableName", "isTemporary"),
Row("tables", true))
- spark.catalog.dropTempTable("tables")
+ spark.catalog.dropTempView("tables")
}
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 3bbe87adc4..7020841d31 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -333,7 +333,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
"SELECT sum('a'), avg('a'), count(null) FROM testData",
Row(null, null, 0) :: Nil)
} finally {
- spark.catalog.dropTempTable("testData3x")
+ spark.catalog.dropTempView("testData3x")
}
}
@@ -1453,12 +1453,12 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
spark.read.json(sparkContext.makeRDD("""{"a": {"b": [{"c": 1}]}}""" :: Nil))
.registerTempTable("data")
checkAnswer(sql("SELECT a.b[0].c FROM data GROUP BY a.b[0].c"), Row(1))
- spark.catalog.dropTempTable("data")
+ spark.catalog.dropTempView("data")
spark.read.json(
sparkContext.makeRDD("""{"a": {"b": 1}}""" :: Nil)).registerTempTable("data")
checkAnswer(sql("SELECT a.b + 1 FROM data GROUP BY a.b + 1"), Row(2))
- spark.catalog.dropTempTable("data")
+ spark.catalog.dropTempView("data")
}
test("SPARK-4432 Fix attribute reference resolution error when using ORDER BY") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
index 427f24a9f8..922154320c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
@@ -55,7 +55,7 @@ class UDFSuite extends QueryTest with SharedSQLContext {
val df = Seq((1, "Tearing down the walls that divide us")).toDF("id", "saying")
df.registerTempTable("tmp_table")
checkAnswer(sql("select spark_partition_id() from tmp_table").toDF(), Row(0))
- spark.catalog.dropTempTable("tmp_table")
+ spark.catalog.dropTempView("tmp_table")
}
test("SPARK-8005 input_file_name") {
@@ -66,7 +66,7 @@ class UDFSuite extends QueryTest with SharedSQLContext {
val answer = sql("select input_file_name() from test_table").head().getString(0)
assert(answer.contains(dir.getCanonicalPath))
assert(sql("select input_file_name() from test_table").distinct().collect().length >= 2)
- spark.catalog.dropTempTable("test_table")
+ spark.catalog.dropTempView("test_table")
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
index 88269a6a2b..2099d4e1b3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
@@ -191,7 +191,7 @@ class InMemoryColumnarQuerySuite extends QueryTest with SharedSQLContext {
checkAnswer(
sql(s"SELECT DISTINCT ${allColumns} FROM InMemoryCache_different_data_types"),
spark.table("InMemoryCache_different_data_types").collect())
- spark.catalog.dropTempTable("InMemoryCache_different_data_types")
+ spark.catalog.dropTempView("InMemoryCache_different_data_types")
}
test("SPARK-10422: String column in InMemoryColumnarCache needs to override clone method") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetReadBenchmark.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetReadBenchmark.scala
index 373d3a3a0b..69a600a55b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetReadBenchmark.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetReadBenchmark.scala
@@ -52,7 +52,7 @@ object ParquetReadBenchmark {
}
def withTempTable(tableNames: String*)(f: => Unit): Unit = {
- try f finally tableNames.foreach(spark.catalog.dropTempTable)
+ try f finally tableNames.foreach(spark.catalog.dropTempView)
}
def withSQLConf(pairs: (String, String)*)(f: => Unit): Unit = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
index d8a2c3821b..e4d4cecd5b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
@@ -58,7 +58,7 @@ class CatalogSuite
}
private def createTempTable(name: String): Unit = {
- sessionCatalog.createTempTable(name, Range(1, 2, 3, 4, Seq()), overrideIfExists = true)
+ sessionCatalog.createTempView(name, Range(1, 2, 3, 4, Seq()), overrideIfExists = true)
}
private def dropTable(name: String, db: Option[String] = None): Unit = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
index a49a8c9f2c..45a9c9dc47 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
@@ -153,7 +153,7 @@ private[sql] trait SQLTestUtils
try f finally {
// If the test failed part way, we don't want to mask the failure by failing to remove
// temp tables that never got created.
- try tableNames.foreach(spark.catalog.dropTempTable) catch {
+ try tableNames.foreach(spark.catalog.dropTempView) catch {
case _: NoSuchTableException =>
}
}
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index 2f20cde4b1..4c528fbbbe 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -130,7 +130,7 @@ private[hive] class HiveMetastoreCatalog(sparkSession: SparkSession) extends Log
options = options)
LogicalRelation(
- dataSource.resolveRelation(),
+ dataSource.resolveRelation(checkPathExist = true),
metastoreTableIdentifier = Some(TableIdentifier(in.name, Some(in.database))))
}
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala
index 093cd3a96c..d96eb0169e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala
@@ -31,7 +31,7 @@ class ErrorPositionSuite extends QueryTest with TestHiveSingleton with BeforeAnd
override protected def beforeEach(): Unit = {
super.beforeEach()
if (spark.wrapped.tableNames().contains("src")) {
- spark.catalog.dropTempTable("src")
+ spark.catalog.dropTempView("src")
}
Seq((1, "")).toDF("key", "value").registerTempTable("src")
Seq((1, 1, 1)).toDF("a", "a", "b").registerTempTable("dupAttributes")
@@ -39,8 +39,8 @@ class ErrorPositionSuite extends QueryTest with TestHiveSingleton with BeforeAnd
override protected def afterEach(): Unit = {
try {
- spark.catalog.dropTempTable("src")
- spark.catalog.dropTempTable("dupAttributes")
+ spark.catalog.dropTempView("src")
+ spark.catalog.dropTempView("dupAttributes")
} finally {
super.afterEach()
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveDDLCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveDDLCommandSuite.scala
index 538e218f7e..2d8b1f325a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveDDLCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveDDLCommandSuite.scala
@@ -348,7 +348,7 @@ class HiveDDLCommandSuite extends PlanTest {
test("create table - temporary") {
val query = "CREATE TEMPORARY TABLE tab1 (id int, name string)"
val e = intercept[ParseException] { parser.parsePlan(query) }
- assert(e.message.contains("registerTempTable"))
+ assert(e.message.contains("CREATE TEMPORARY TABLE is not supported yet"))
}
test("create table - external") {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala
index e8188e5f02..8dc756b938 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala
@@ -33,7 +33,7 @@ class ListTablesSuite extends QueryTest with TestHiveSingleton with BeforeAndAft
override def beforeAll(): Unit = {
super.beforeAll()
// The catalog in HiveContext is a case insensitive one.
- sessionState.catalog.createTempTable(
+ sessionState.catalog.createTempView(
"ListTablesSuiteTable", df.logicalPlan, overrideIfExists = true)
sql("CREATE TABLE HiveListTablesSuiteTable (key int, value string)")
sql("CREATE DATABASE IF NOT EXISTS ListTablesSuiteDB")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
index 0f416eb24d..c97b3f3197 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
@@ -193,7 +193,7 @@ abstract class AggregationQuerySuite extends QueryTest with SQLTestUtils with Te
spark.sql("DROP TABLE IF EXISTS agg1")
spark.sql("DROP TABLE IF EXISTS agg2")
spark.sql("DROP TABLE IF EXISTS agg3")
- spark.catalog.dropTempTable("emptyTable")
+ spark.catalog.dropTempView("emptyTable")
} finally {
super.afterAll()
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
index 51d537d43a..521964eb4e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
@@ -347,7 +347,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDTFExplode")
}
- spark.catalog.dropTempTable("testUDF")
+ spark.catalog.dropTempView("testUDF")
}
test("Hive UDF in group by") {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLWindowFunctionSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLWindowFunctionSuite.scala
index cbbeacf6ad..4d284e1042 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLWindowFunctionSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLWindowFunctionSuite.scala
@@ -353,7 +353,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
checkAnswer(actual, expected)
- spark.catalog.dropTempTable("nums")
+ spark.catalog.dropTempView("nums")
}
test("SPARK-7595: Window will cause resolve failed with self join") {