[SPARK-6490][Docs] Add docs for rpc configurations

Added docs for the RPC configurations and also fixed two places that were missed in #5595.

Author: zsxwing <zsxwing@gmail.com>

Closes #5607 from zsxwing/SPARK-6490-docs and squashes the following commits:

25a6736 [zsxwing] Increase the default timeout to 120s
6e37c30 [zsxwing] Update docs
5577540 [zsxwing] Use spark.network.timeout as the default timeout if it is present
4f07174 [zsxwing] Fix unit tests
1c2cf26 [zsxwing] Add docs for rpc configurations
Authored by zsxwing on 2015-04-21 18:37:53 -07:00; committed by Reynold Xin
parent a0761ec706
commit 3a3f7100f4
4 changed files with 38 additions and 6 deletions

core/src/main/scala/org/apache/spark/util/RpcUtils.scala

@@ -48,11 +48,13 @@ object RpcUtils {

   /** Returns the default Spark timeout to use for RPC ask operations. */
   def askTimeout(conf: SparkConf): FiniteDuration = {
-    conf.getTimeAsSeconds("spark.rpc.askTimeout", "30s") seconds
+    conf.getTimeAsSeconds("spark.rpc.askTimeout",
+      conf.get("spark.network.timeout", "120s")) seconds
   }

   /** Returns the default Spark timeout to use for RPC remote endpoint lookup. */
   def lookupTimeout(conf: SparkConf): FiniteDuration = {
-    conf.getTimeAsSeconds("spark.rpc.lookupTimeout", "30s") seconds
+    conf.getTimeAsSeconds("spark.rpc.lookupTimeout",
+      conf.get("spark.network.timeout", "120s")) seconds
   }
 }
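
For reference, a minimal sketch of the fallback chain this hunk introduces, using only public SparkConf methods (getTimeAsSeconds and get); the config values below are illustrative:

import org.apache.spark.SparkConf

// Resolution order: spark.rpc.askTimeout first, then spark.network.timeout,
// then the hard-coded "120s" default.
val conf = new SparkConf().set("spark.network.timeout", "100s")
val askTimeoutSecs = conf.getTimeAsSeconds(
  "spark.rpc.askTimeout", conf.get("spark.network.timeout", "120s"))
assert(askTimeoutSecs == 100L) // unset askTimeout falls back to spark.network.timeout

conf.set("spark.rpc.askTimeout", "30s")
assert(conf.getTimeAsSeconds("spark.rpc.askTimeout", "120s") == 30L) // explicit value wins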

core/src/test/scala/org/apache/spark/SparkConfSuite.scala

@@ -227,7 +227,7 @@ class SparkConfSuite extends FunSuite with LocalSparkContext with ResetSystemProperties {
   test("akka deprecated configs") {
     val conf = new SparkConf()

-    assert(!conf.contains("spark.rpc.num.retries"))
+    assert(!conf.contains("spark.rpc.numRetries"))
     assert(!conf.contains("spark.rpc.retry.wait"))
     assert(!conf.contains("spark.rpc.askTimeout"))
     assert(!conf.contains("spark.rpc.lookupTimeout"))

core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala

@@ -156,7 +156,7 @@ abstract class RpcEnvSuite extends FunSuite with BeforeAndAfterAll {
     val conf = new SparkConf()
     conf.set("spark.rpc.retry.wait", "0")
-    conf.set("spark.rpc.num.retries", "1")
+    conf.set("spark.rpc.numRetries", "1")
     val anotherEnv = createRpcEnv(conf, "remote", 13345)
     // Use anotherEnv to find out the RpcEndpointRef
     val rpcEndpointRef = anotherEnv.setupEndpointRef("local", env.address, "ask-timeout")
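
As a side note, the configuration in this test doubles as a fail-fast pattern; a hedged sketch, assuming the documented defaults of 3 retries and a 3s wait:

import org.apache.spark.SparkConf

// Mirrors the test above: a single attempt (spark.rpc.numRetries = 1) with no
// wait between attempts makes a timed-out ask fail immediately instead of
// being retried 3 times with a 3s pause (the documented defaults).
val conf = new SparkConf()
  .set("spark.rpc.retry.wait", "0")
  .set("spark.rpc.numRetries", "1")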

docs/configuration.md

@@ -963,8 +963,9 @@ Apart from these, the following properties are also available, and may be useful
   <td>
     Default timeout for all network interactions. This config will be used in place of
     <code>spark.core.connection.ack.wait.timeout</code>, <code>spark.akka.timeout</code>,
-    <code>spark.storage.blockManagerSlaveTimeoutMs</code> or
-    <code>spark.shuffle.io.connectionTimeout</code>, if they are not configured.
+    <code>spark.storage.blockManagerSlaveTimeoutMs</code>,
+    <code>spark.shuffle.io.connectionTimeout</code>, <code>spark.rpc.askTimeout</code> or
+    <code>spark.rpc.lookupTimeout</code> if they are not configured.
   </td>
 </tr>
 <tr>
@@ -982,6 +983,35 @@ Apart from these, the following properties are also available, and may be useful
     This is only relevant for the Spark shell.
   </td>
 </tr>
+<tr>
+  <td><code>spark.rpc.numRetries</code></td>
+  <td>3</td>
+  <td>
+    Number of times to retry before an RPC task gives up.
+    An RPC task will run at most this number of times.
+  </td>
+</tr>
+<tr>
+  <td><code>spark.rpc.retry.wait</code></td>
+  <td>3s</td>
+  <td>
+    Duration for an RPC ask operation to wait before retrying.
+  </td>
+</tr>
+<tr>
+  <td><code>spark.rpc.askTimeout</code></td>
+  <td>120s</td>
+  <td>
+    Duration for an RPC ask operation to wait before timing out.
+  </td>
+</tr>
+<tr>
+  <td><code>spark.rpc.lookupTimeout</code></td>
+  <td>120s</td>
+  <td>
+    Duration for an RPC remote endpoint lookup operation to wait before timing out.
+  </td>
+</tr>
 </table>

 #### Scheduling
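
To round out the table above, a hedged end-to-end sketch of setting these properties on a SparkConf (the app name and all values are illustrative, not recommendations):

import org.apache.spark.SparkConf

// spark.rpc.askTimeout and spark.rpc.lookupTimeout are left unset here, so per
// the docs above both fall back to spark.network.timeout (100s in this sketch).
val conf = new SparkConf()
  .setAppName("rpc-config-example")     // hypothetical app name
  .set("spark.network.timeout", "100s") // shared fallback for the RPC timeouts
  .set("spark.rpc.numRetries", "5")     // an RPC task runs at most 5 times
  .set("spark.rpc.retry.wait", "5s")    // pause between retry attempts

The same properties can equally be passed on the command line, e.g. spark-submit --conf spark.rpc.numRetries=5.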