author | Reynold Xin <rxin@databricks.com> | 2016-07-05 11:36:05 -0700
---|---|---
committer | Reynold Xin <rxin@databricks.com> | 2016-07-05 11:36:05 -0700
commit | 16a2a7d714f945b06978e3bd20a58ea32f0621ac (patch) |
tree | 76fd896b952ea96a890fcb5500af6344886c46cd /sql/hive/src/main |
parent | 07d9c5327f050f9da611d5239f61ed73b36ce4e6 (diff) |
[SPARK-16311][SQL] Metadata refresh should work on temporary views
## What changes were proposed in this pull request?
This patch fixes a bug where the refresh command does not work on temporary views. It is based on https://github.com/apache/spark/pull/13989, but removes the public Dataset.refresh() API and improves test coverage.
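For context, here is a minimal repro sketch of the behavior being fixed (not part of the patch; the path and view name are made up), assuming a Parquet-backed temporary view whose underlying files change after the view is registered:

```scala
// Hypothetical repro: a temporary view over a file-based data source.
spark.read.parquet("/tmp/events").createOrReplaceTempView("events_view")
spark.table("events_view").count()   // file listing gets cached

// ... another job appends new Parquet files under /tmp/events ...

// Before this patch, REFRESH TABLE did not apply to temporary views,
// so the stale cached metadata could still be used. With the fix:
spark.sql("REFRESH TABLE events_view")
spark.table("events_view").count()   // sees the newly added files
```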
Note that I actually think the public refresh() API is very useful. We can implement it in the future by also invalidating the lazy vals in QueryExecution (or, alternatively, by creating a new QueryExecution).
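As an illustration of that note, a minimal sketch of how such a refresh could be built by constructing a new QueryExecution rather than invalidating individual lazy vals. The helper name `refreshed` is hypothetical, and this relies on `private[sql]` internals (`sessionState.executePlan`, the `Dataset` constructor, `exprEnc`), so it would have to live inside the `org.apache.spark.sql` package:

```scala
import org.apache.spark.sql.{Dataset, SparkSession}

// Sketch only: rebuilding the QueryExecution from the original logical plan
// discards all memoized lazy vals (analyzed, optimized, and physical plans)
// in one step, instead of invalidating them one by one.
def refreshed[T](ds: Dataset[T]): Dataset[T] = {
  val spark: SparkSession = ds.sparkSession
  val newExecution = spark.sessionState.executePlan(ds.queryExecution.logical)
  new Dataset[T](spark, newExecution, ds.exprEnc)
}
```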
## How was this patch tested?
Re-enabled a previously ignored test, and added a new Hive test suite that exercises the behavior of temporary views against MetastoreRelation.
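A hypothetical sketch of the kind of check involved (not the actual suite added by this patch), assuming Spark's `SQLTestUtils` helpers such as `withTempPath` and `sql`:

```scala
test("refresh table on a temporary view over files") {
  withTempPath { dir =>
    val path = dir.getAbsolutePath
    spark.range(10).write.parquet(path)
    spark.read.parquet(path).createOrReplaceTempView("view_refresh")
    assert(sql("SELECT count(*) FROM view_refresh").head().getLong(0) === 10)

    // Append more files behind the view's back, then refresh its metadata.
    spark.range(10).write.mode("append").parquet(path)
    sql("REFRESH TABLE view_refresh")
    assert(sql("SELECT count(*) FROM view_refresh").head().getLong(0) === 20)
  }
}
```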
Author: Reynold Xin <rxin@databricks.com>
Author: petermaxlee <petermaxlee@gmail.com>
Closes #14009 from rxin/SPARK-16311.
Diffstat (limited to 'sql/hive/src/main')
-rw-r--r-- | sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala | 4 |
-rw-r--r-- | sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala | 5 |
2 files changed, 1 insertion(+), 8 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index 7dae473f47..20e64a4e09 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -147,10 +147,6 @@ private[hive] class HiveMetastoreCatalog(sparkSession: SparkSession) extends Log
     // it is better at here to invalidate the cache to avoid confusing waring logs from the
     // cache loader (e.g. cannot find data source provider, which is only defined for
     // data source table.).
-    invalidateTable(tableIdent)
-  }
-
-  def invalidateTable(tableIdent: TableIdentifier): Unit = {
     cachedDataSourceTables.invalidate(getQualifiedTableName(tableIdent))
   }

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
index 18b8dafe64..ebb6711f6a 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
@@ -90,13 +90,10 @@ private[sql] class HiveSessionCatalog(
   val CreateTables: Rule[LogicalPlan] = metastoreCatalog.CreateTables

   override def refreshTable(name: TableIdentifier): Unit = {
+    super.refreshTable(name)
     metastoreCatalog.refreshTable(name)
   }

-  override def invalidateTable(name: TableIdentifier): Unit = {
-    metastoreCatalog.invalidateTable(name)
-  }
-
   def invalidateCache(): Unit = {
     metastoreCatalog.cachedDataSourceTables.invalidateAll()
   }
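Net effect of the diff: `HiveSessionCatalog.refreshTable` now delegates to `SessionCatalog.refreshTable` first, which knows about temporary views, and then invalidates the Hive metastore-backed data source table cache, which makes the separate `invalidateTable` override redundant. A short usage sketch (view and table names hypothetical):

```scala
// After this patch, both paths below invalidate stale cached metadata:
spark.catalog.refreshTable("events_view")  // temporary view, handled via super
spark.sql("REFRESH TABLE db.events")       // metastore table, Hive cache invalidated
```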