From 3664142350afb6bf40a8bcb3508b56670603dae4 Mon Sep 17 00:00:00 2001
From: felixcheung
Date: Tue, 19 Apr 2016 17:29:28 -0700
Subject: [SPARK-14717] [PYTHON] Scala, Python APIs for Dataset.unpersist differ
 in default blocking value

## What changes were proposed in this pull request?

Change unpersist blocking parameter default value to match Scala.

## How was this patch tested?

Unit tests, manual tests.

jkbradley davies

Author: felixcheung

Closes #12507 from felixcheung/pyunpersist.
---
 python/pyspark/sql/dataframe.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/python/pyspark/sql/dataframe.py b/python/pyspark/sql/dataframe.py
index b4fa836893..328bda6601 100644
--- a/python/pyspark/sql/dataframe.py
+++ b/python/pyspark/sql/dataframe.py
@@ -326,9 +326,11 @@ class DataFrame(object):
         return self
 
     @since(1.3)
-    def unpersist(self, blocking=True):
+    def unpersist(self, blocking=False):
         """Marks the :class:`DataFrame` as non-persistent, and remove all blocks for it from
         memory and disk.
+
+        .. note:: `blocking` default has changed to False to match Scala in 2.0.
         """
         self.is_cached = False
         self._jdf.unpersist(blocking)
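
For context, a minimal sketch of how the changed default behaves from user code. This is not part of the patch; it assumes a PySpark 2.x installation that can start a local session.

```python
# Illustrative sketch only, not part of the patch. Assumes PySpark 2.x is
# installed and a local session can be started.
from pyspark.sql import SparkSession

spark = SparkSession.builder.master("local[1]").appName("unpersist-demo").getOrCreate()

df = spark.createDataFrame([(1, "a"), (2, "b")], ["id", "value"])
df.cache()
df.count()  # materialize the cached blocks

# With this change, unpersist() is asynchronous by default (blocking=False),
# matching Dataset.unpersist() in Scala.
df.unpersist()

# Callers that relied on the old synchronous behavior now opt in explicitly.
df.cache()
df.count()
df.unpersist(blocking=True)  # wait until the blocks are removed

spark.stop()
```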