author    zsxwing <zsxwing@gmail.com>    2015-04-21 18:37:53 -0700
committer Reynold Xin <rxin@databricks.com>    2015-04-21 18:37:53 -0700
commit    3a3f7100f4ead9b7ac50e9711ac50b603ebf6bea (patch)
tree      9270d0d9a0b0616465c30efe0902a12245de0d3a
parent    a0761ec7063f984dcadc8d154f83dd9cfd1c5e0b (diff)
[SPARK-6490][Docs] Add docs for rpc configurations
Added docs for the RPC configurations and also fixed two places that should have been fixed in #5595.

Author: zsxwing <zsxwing@gmail.com>

Closes #5607 from zsxwing/SPARK-6490-docs and squashes the following commits:

25a6736 [zsxwing] Increase the default timeout to 120s
6e37c30 [zsxwing] Update docs
5577540 [zsxwing] Use spark.network.timeout as the default timeout if it is present
4f07174 [zsxwing] Fix unit tests
1c2cf26 [zsxwing] Add docs for rpc configurations
 core/src/main/scala/org/apache/spark/util/RpcUtils.scala   |  6 ++--
 core/src/test/scala/org/apache/spark/SparkConfSuite.scala  |  2 +-
 core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala |  2 +-
 docs/configuration.md                                      | 34 ++++++++++++++++++++++++++++++++--
 4 files changed, 38 insertions(+), 6 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/util/RpcUtils.scala b/core/src/main/scala/org/apache/spark/util/RpcUtils.scala
index 5ae793e0e8..f16cc8e7e4 100644
--- a/core/src/main/scala/org/apache/spark/util/RpcUtils.scala
+++ b/core/src/main/scala/org/apache/spark/util/RpcUtils.scala
@@ -48,11 +48,13 @@ object RpcUtils {
 
   /** Returns the default Spark timeout to use for RPC ask operations. */
   def askTimeout(conf: SparkConf): FiniteDuration = {
-    conf.getTimeAsSeconds("spark.rpc.askTimeout", "30s") seconds
+    conf.getTimeAsSeconds("spark.rpc.askTimeout",
+      conf.get("spark.network.timeout", "120s")) seconds
   }
 
   /** Returns the default Spark timeout to use for RPC remote endpoint lookup. */
   def lookupTimeout(conf: SparkConf): FiniteDuration = {
-    conf.getTimeAsSeconds("spark.rpc.lookupTimeout", "30s") seconds
+    conf.getTimeAsSeconds("spark.rpc.lookupTimeout",
+      conf.get("spark.network.timeout", "120s")) seconds
   }
 }
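The net effect of the RpcUtils change: spark.network.timeout now acts as the
fallback for both RPC timeouts, and the hard-coded default rises from 30s to
120s. A minimal sketch of how that resolution plays out on a bare SparkConf
(illustrative values, assuming a Spark build from this branch on the classpath):

    import scala.concurrent.duration._
    import org.apache.spark.SparkConf

    val conf = new SparkConf(loadDefaults = false)
    conf.set("spark.network.timeout", "200s")

    // spark.rpc.askTimeout is unset, so the inner lookup supplies
    // spark.network.timeout ("200s") as the default; the literal "120s"
    // applies only when neither key is configured.
    val askTimeout: FiniteDuration =
      conf.getTimeAsSeconds("spark.rpc.askTimeout",
        conf.get("spark.network.timeout", "120s")).seconds

    assert(askTimeout == 200.seconds)

Setting spark.rpc.askTimeout explicitly would override both defaults.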
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index d7d8014a20..272e6af051 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -227,7 +227,7 @@ class SparkConfSuite extends FunSuite with LocalSparkContext with ResetSystemProperties {
   test("akka deprecated configs") {
     val conf = new SparkConf()
 
-    assert(!conf.contains("spark.rpc.num.retries"))
+    assert(!conf.contains("spark.rpc.numRetries"))
     assert(!conf.contains("spark.rpc.retry.wait"))
     assert(!conf.contains("spark.rpc.askTimeout"))
     assert(!conf.contains("spark.rpc.lookupTimeout"))
diff --git a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
index 5fbda37c7c..44c88b00c4 100644
--- a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
@@ -156,7 +156,7 @@ abstract class RpcEnvSuite extends FunSuite with BeforeAndAfterAll {
     val conf = new SparkConf()
     conf.set("spark.rpc.retry.wait", "0")
-    conf.set("spark.rpc.num.retries", "1")
+    conf.set("spark.rpc.numRetries", "1")
     val anotherEnv = createRpcEnv(conf, "remote", 13345)
     // Use anotherEnv to find out the RpcEndpointRef
     val rpcEndpointRef = anotherEnv.setupEndpointRef("local", env.address, "ask-timeout")
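For context, a sketch of how the renamed key pairs with spark.rpc.retry.wait
when tuning ask retries (values illustrative, not recommendations):

    import org.apache.spark.SparkConf

    // After this patch only the camelCase spelling is read; the dotted
    // "spark.rpc.num.retries" spelling from #5595 is gone.
    val conf = new SparkConf(loadDefaults = false)
      .set("spark.rpc.numRetries", "3")  // give up after 3 attempts
      .set("spark.rpc.retry.wait", "1s") // pause 1s between attempts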
diff --git a/docs/configuration.md b/docs/configuration.md
index d9e9e67026..d587b91124 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -963,8 +963,9 @@ Apart from these, the following properties are also available, and may be useful in some situations:
   <td>
     Default timeout for all network interactions. This config will be used in place of
     <code>spark.core.connection.ack.wait.timeout</code>, <code>spark.akka.timeout</code>,
-    <code>spark.storage.blockManagerSlaveTimeoutMs</code> or
-    <code>spark.shuffle.io.connectionTimeout</code>, if they are not configured.
+    <code>spark.storage.blockManagerSlaveTimeoutMs</code>,
+    <code>spark.shuffle.io.connectionTimeout</code>, <code>spark.rpc.askTimeout</code> or
+    <code>spark.rpc.lookupTimeout</code>, if they are not configured.
   </td>
 </tr>
 <tr>
@@ -982,6 +983,35 @@ Apart from these, the following properties are also available, and may be useful in some situations:
     This is only relevant for the Spark shell.
   </td>
 </tr>
+<tr>
+  <td><code>spark.rpc.numRetries</code></td>
+  <td>3</td>
+  <td>
+    Number of times to retry before an RPC task gives up.
+    An RPC task will run at most this number of times.
+  </td>
+</tr>
+<tr>
+  <td><code>spark.rpc.retry.wait</code></td>
+  <td>3s</td>
+  <td>
+    Duration for an RPC ask operation to wait before retrying.
+  </td>
+</tr>
+<tr>
+  <td><code>spark.rpc.askTimeout</code></td>
+  <td>120s</td>
+  <td>
+    Duration for an RPC ask operation to wait before timing out.
+  </td>
+</tr>
+<tr>
+  <td><code>spark.rpc.lookupTimeout</code></td>
+  <td>120s</td>
+  <td>
+    Duration for an RPC remote endpoint lookup operation to wait before timing out.
+  </td>
+</tr>
 </table>
 
 #### Scheduling
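Taken together, the newly documented knobs can be set like any other Spark
property. A hedged example, where the values simply restate the documented
defaults and "MyApp" is a placeholder name:

    import org.apache.spark.SparkConf

    // Illustrative only: these restate the defaults above, so setting
    // them is a no-op unless you change the numbers.
    val conf = new SparkConf()
      .setAppName("MyApp")
      .set("spark.rpc.numRetries", "3")       // attempts before an RPC task gives up
      .set("spark.rpc.retry.wait", "3s")      // wait between retries
      .set("spark.rpc.askTimeout", "120s")    // per-ask timeout
      .set("spark.rpc.lookupTimeout", "120s") // remote endpoint lookup timeout

If neither RPC timeout is set, spark.network.timeout (when configured) now
covers them as well, per the updated description above.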