author     Wenchen Fan <wenchen@databricks.com>  2016-10-11 15:21:28 +0800
committer  Wenchen Fan <wenchen@databricks.com>  2016-10-11 15:21:28 +0800
commit     7388ad94d717784a1837ac5a4a9b53219892d080 (patch)
tree       1a4e396fd19e63f299b3d0e55289ddec3174b42c /python/pyspark
parent     658c7147f5bf637f36e8c66b9207d94b1e7c74c5 (diff)
download   spark-7388ad94d717784a1837ac5a4a9b53219892d080.tar.gz
           spark-7388ad94d717784a1837ac5a4a9b53219892d080.tar.bz2
           spark-7388ad94d717784a1837ac5a4a9b53219892d080.zip
[SPARK-17338][SQL][FOLLOW-UP] add global temp view
## What changes were proposed in this pull request?

Address post-hoc review comments for https://github.com/apache/spark/pull/14897.

## How was this patch tested?

N/A

Author: Wenchen Fan <wenchen@databricks.com>

Closes #15424 from cloud-fan/global-temp-view.
Diffstat (limited to 'python/pyspark')
-rw-r--r--  python/pyspark/sql/catalog.py | 5 +++++
1 file changed, 5 insertions(+), 0 deletions(-)
diff --git a/python/pyspark/sql/catalog.py b/python/pyspark/sql/catalog.py
index df3bf4254d..a36d02e0db 100644
--- a/python/pyspark/sql/catalog.py
+++ b/python/pyspark/sql/catalog.py
@@ -169,6 +169,10 @@ class Catalog(object):
def dropTempView(self, viewName):
"""Drops the local temporary view with the given view name in the catalog.
If the view has been cached before, then it will also be uncached.
+ Returns true if this view is dropped successfully, false otherwise.
+
+ Note that the return type of this method was None in Spark 2.0, but changed to Boolean
+ in Spark 2.1.
>>> spark.createDataFrame([(1, 1)]).createTempView("my_table")
>>> spark.table("my_table").collect()
@@ -185,6 +189,7 @@ class Catalog(object):
def dropGlobalTempView(self, viewName):
"""Drops the global temporary view with the given view name in the catalog.
If the view has been cached before, then it will also be uncached.
+ Returns true if this view is dropped successfully, false otherwise.
>>> spark.createDataFrame([(1, 1)]).createGlobalTempView("my_table")
>>> spark.table("global_temp.my_table").collect()