about summary refs log tree commit diff
path: root/python/pyspark/worker.py
diff options
context:
space:
mode:
authorDavies Liu <davies.liu@gmail.com>2014-10-23 17:20:00 -0700
committerJosh Rosen <joshrosen@databricks.com>2014-10-23 17:20:00 -0700
commite595c8d08a20a122295af62d5e9cc4116f9727f6 (patch)
treeec0226aecad30372b9ece27e534f4482c24c94bf /python/pyspark/worker.py
parent83b7a1c6503adce1826fc537b4db47e534da5cae (diff)
downloadspark-e595c8d08a20a122295af62d5e9cc4116f9727f6.tar.gz
spark-e595c8d08a20a122295af62d5e9cc4116f9727f6.tar.bz2
spark-e595c8d08a20a122295af62d5e9cc4116f9727f6.zip
[SPARK-3993] [PySpark] fix bug while reuse worker after take()
After take(), there may be some garbage left in the socket, and the next task assigned to this worker will then hang because of corrupted data. We should make sure the socket is clean before reusing it: write END_OF_STREAM at the end, and check it after reading out all results from Python. Author: Davies Liu <davies.liu@gmail.com> Author: Davies Liu <davies@databricks.com> Closes #2838 from davies/fix_reuse and squashes the following commits: 8872914 [Davies Liu] fix tests 660875b [Davies Liu] fix bug while reuse worker after take()
Diffstat (limited to 'python/pyspark/worker.py')
-rw-r--r--python/pyspark/worker.py11
1 file changed, 9 insertions(+), 2 deletions(-)
diff --git a/python/pyspark/worker.py b/python/pyspark/worker.py
index 8257dddfee..2bdccb5e93 100644
--- a/python/pyspark/worker.py
+++ b/python/pyspark/worker.py
@@ -57,7 +57,7 @@ def main(infile, outfile):
boot_time = time.time()
split_index = read_int(infile)
if split_index == -1: # for unit tests
- return
+ exit(-1)
# initialize global state
shuffle.MemoryBytesSpilled = 0
@@ -111,7 +111,6 @@ def main(infile, outfile):
try:
write_int(SpecialLengths.PYTHON_EXCEPTION_THROWN, outfile)
write_with_length(traceback.format_exc(), outfile)
- outfile.flush()
except IOError:
# JVM close the socket
pass
@@ -131,6 +130,14 @@ def main(infile, outfile):
for (aid, accum) in _accumulatorRegistry.items():
pickleSer._write_with_length((aid, accum._value), outfile)
+ # check end of stream
+ if read_int(infile) == SpecialLengths.END_OF_STREAM:
+ write_int(SpecialLengths.END_OF_STREAM, outfile)
+ else:
+ # write a different value to tell JVM to not reuse this worker
+ write_int(SpecialLengths.END_OF_DATA_SECTION, outfile)
+ exit(-1)
+
if __name__ == '__main__':
# Read a local port to connect to from stdin