author     Davies Liu <davies@databricks.com>      2015-04-02 12:18:33 -0700
committer  Josh Rosen <joshrosen@databricks.com>   2015-04-02 12:21:04 -0700
commit     a73055f7f9104cb5a9ed43a6cd4a82d463702b60 (patch)
tree       d2af38fd20a1338c46c8d6d3e321e5f273ce50cc
parent     758ebf77d7daded7c5f6f41ee269205bc246d487 (diff)
[SPARK-6667] [PySpark] remove setReuseAddress
Removing setReuseAddress: with SO_REUSEADDR set on the server-side socket, the server could fail to acknowledge incoming connections, so this removes it. The change also retries once after a timeout and adds a timeout on the client side.

Author: Davies Liu <davies@databricks.com>

Closes #5324 from davies/collect_hang and squashes the following commits:

e5a51a2 [Davies Liu] remove setReuseAddress
7977c2f [Davies Liu] do retry on client side
b838f35 [Davies Liu] retry after timeout

(cherry picked from commit 0cce5451adfc6bf4661bcf67aca3db26376455fe)
Signed-off-by: Josh Rosen <joshrosen@databricks.com>
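As background for the two hunks below: PythonRDD.serveIterator opens a server socket on an ephemeral localhost port with a backlog of 1 and a 3-second accept timeout, and hands the port number to the Python driver, which connects to it from _load_from_socket. The following is a minimal Python sketch of the server half of that handoff, given purely for illustration; the function name serve_items, the background thread, and the newline-delimited payload are assumptions, not Spark's actual API.

    import socket
    import threading

    def serve_items(items):
        # Illustration only, not Spark's code: bind an ephemeral localhost port
        # with a backlog of 1 and a 3-second accept timeout, mirroring the
        # JVM-side serveIterator below. SO_REUSEADDR is left at its default.
        server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        server.bind(("localhost", 0))   # port 0: let the OS pick a free port
        server.listen(1)                # exactly one client is expected
        server.settimeout(3)            # give up if nobody connects in 3 seconds
        port = server.getsockname()[1]

        def handle():
            try:
                conn, _ = server.accept()   # raises socket.timeout after 3s
                with conn, conn.makefile("wb") as wf:
                    for item in items:
                        wf.write(("%s\n" % item).encode("utf-8"))
            finally:
                server.close()

        threading.Thread(target=handle, daemon=True).start()
        return port                     # the client connects to this port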
-rw-r--r--  core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala  1
-rw-r--r--  python/pyspark/rdd.py  1
2 files changed, 1 insertion(+), 1 deletion(-)
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
index d3077792b5..8241e4fd77 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
@@ -623,7 +623,6 @@ private[spark] object PythonRDD extends Logging {
    */
   private def serveIterator[T](items: Iterator[T], threadName: String): Int = {
     val serverSocket = new ServerSocket(0, 1)
-    serverSocket.setReuseAddress(true)
     // Close the socket if no connection in 3 seconds
     serverSocket.setSoTimeout(3000)
diff --git a/python/pyspark/rdd.py b/python/pyspark/rdd.py
index f1037e04bb..5f7806b11c 100644
--- a/python/pyspark/rdd.py
+++ b/python/pyspark/rdd.py
@@ -114,6 +114,7 @@ def _parse_memory(s):
 def _load_from_socket(port, serializer):
     sock = socket.socket()
+    sock.settimeout(3)
     try:
         sock.connect(("localhost", port))
         rf = sock.makefile("rb", 65536)
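For the client half, here is a hedged sketch of the timeout-and-retry behaviour the commit message describes; the hunk above only adds the timeout, so the single retry, the name load_from_port, and reading raw bytes instead of deserializing a stream are assumptions for illustration.

    import socket

    def load_from_port(port, retries=1):
        # Illustration only: connect with a 3-second timeout and retry once if
        # the attempt times out, as described in the commit message.
        last_err = None
        for _ in range(retries + 1):
            sock = socket.socket()
            sock.settimeout(3)
            try:
                sock.connect(("localhost", port))
                # Spark streams deserialized items from a buffered file object;
                # here we simply read everything the server wrote.
                with sock.makefile("rb", 65536) as rf:
                    return rf.read()
            except socket.timeout as e:
                last_err = e
            finally:
                sock.close()
        raise last_err

Paired with the server sketch above, port = serve_items(["a", "b"]) followed by load_from_port(port) would return b"a\nb\n".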