aboutsummaryrefslogtreecommitdiff
path: root/python/pyspark/sql.py
diff options
context:
space:
mode:
authorDavies Liu <davies.liu@gmail.com>2014-09-18 18:11:48 -0700
committerJosh Rosen <joshrosen@apache.org>2014-09-18 18:11:48 -0700
commite77fa81a61798c89d5a9b6c9dc067d11785254b7 (patch)
tree2d84f29922e4523f223baff1c84573754c1cf0c7 /python/pyspark/sql.py
parent9306297d1d888d0430f79b2133ee7377871a3a18 (diff)
downloadspark-e77fa81a61798c89d5a9b6c9dc067d11785254b7.tar.gz
spark-e77fa81a61798c89d5a9b6c9dc067d11785254b7.tar.bz2
spark-e77fa81a61798c89d5a9b6c9dc067d11785254b7.zip
[SPARK-3554] [PySpark] use broadcast automatically for large closure
Py4J cannot handle large strings efficiently, so we should use a broadcast variable automatically for large closures. (Broadcast uses the local filesystem to pass the data through.) Author: Davies Liu <davies.liu@gmail.com> Closes #2417 from davies/command and squashes the following commits: fbf4e97 [Davies Liu] bugfix aefd508 [Davies Liu] use broadcast automatically for large closure
Diffstat (limited to 'python/pyspark/sql.py')
-rw-r--r--python/pyspark/sql.py8
1 file changed, 6 insertions, 2 deletions
diff --git a/python/pyspark/sql.py b/python/pyspark/sql.py
index 8f6dbab240..42a9920f10 100644
--- a/python/pyspark/sql.py
+++ b/python/pyspark/sql.py
@@ -27,7 +27,7 @@ import warnings
from array import array
from operator import itemgetter
-from pyspark.rdd import RDD, PipelinedRDD
+from pyspark.rdd import RDD
from pyspark.serializers import BatchedSerializer, PickleSerializer, CloudPickleSerializer
from pyspark.storagelevel import StorageLevel
from pyspark.traceback_utils import SCCallSiteSync
@@ -975,7 +975,11 @@ class SQLContext(object):
command = (func,
BatchedSerializer(PickleSerializer(), 1024),
BatchedSerializer(PickleSerializer(), 1024))
- pickled_command = CloudPickleSerializer().dumps(command)
+ ser = CloudPickleSerializer()
+ pickled_command = ser.dumps(command)
+ if len(pickled_command) > (1 << 20):  # 1M
+ broadcast = self._sc.broadcast(pickled_command)
+ pickled_command = ser.dumps(broadcast)
broadcast_vars = ListConverter().convert(
[x._jbroadcast for x in self._sc._pickled_broadcast_vars],
self._sc._gateway._gateway_client)