You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2017/10/19 04:07:21 UTC
spark git commit: [SPARK-21551][PYTHON] Increase timeout for PythonRDD.serveIterator
Repository: spark
Updated Branches:
refs/heads/branch-2.2 010b50cea -> f8c83fdc5
[SPARK-21551][PYTHON] Increase timeout for PythonRDD.serveIterator
Backport of https://github.com/apache/spark/pull/18752 (https://issues.apache.org/jira/browse/SPARK-21551)
(cherry picked from commit 9d3c6640f56e3e4fd195d3ad8cead09df67a72c7)
Author: peay <pe...@protonmail.com>
Closes #19512 from FRosner/branch-2.2.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f8c83fdc
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f8c83fdc
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f8c83fdc
Branch: refs/heads/branch-2.2
Commit: f8c83fdc52ba9120098e52a35085448150af6b50
Parents: 010b50c
Author: peay <pe...@protonmail.com>
Authored: Thu Oct 19 13:07:04 2017 +0900
Committer: hyukjinkwon <gu...@gmail.com>
Committed: Thu Oct 19 13:07:04 2017 +0900
----------------------------------------------------------------------
.../src/main/scala/org/apache/spark/api/python/PythonRDD.scala | 6 +++---
python/pyspark/rdd.py | 2 +-
2 files changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/f8c83fdc/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
index b0dd2fc..807b51f 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
@@ -683,7 +683,7 @@ private[spark] object PythonRDD extends Logging {
* Create a socket server and a background thread to serve the data in `items`,
*
* The socket server can only accept one connection, or close if no connection
- * in 3 seconds.
+ * in 15 seconds.
*
* Once a connection comes in, it tries to serialize all the data in `items`
* and send them into this connection.
@@ -692,8 +692,8 @@ private[spark] object PythonRDD extends Logging {
*/
def serveIterator[T](items: Iterator[T], threadName: String): Int = {
val serverSocket = new ServerSocket(0, 1, InetAddress.getByName("localhost"))
- // Close the socket if no connection in 3 seconds
- serverSocket.setSoTimeout(3000)
+ // Close the socket if no connection in 15 seconds
+ serverSocket.setSoTimeout(15000)
new Thread(threadName) {
setDaemon(true)
http://git-wip-us.apache.org/repos/asf/spark/blob/f8c83fdc/python/pyspark/rdd.py
----------------------------------------------------------------------
diff --git a/python/pyspark/rdd.py b/python/pyspark/rdd.py
index 6014179..aca00bc 100644
--- a/python/pyspark/rdd.py
+++ b/python/pyspark/rdd.py
@@ -127,7 +127,7 @@ def _load_from_socket(port, serializer):
af, socktype, proto, canonname, sa = res
sock = socket.socket(af, socktype, proto)
try:
- sock.settimeout(3)
+ sock.settimeout(15)
sock.connect(sa)
except socket.error:
sock.close()
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org