about summary refs log tree commit diff
diff options
context:
space:
mode:
authorWangTaoTheTonic <barneystinson@aliyun.com>2014-09-17 21:59:23 -0700
committerPatrick Wendell <pwendell@gmail.com>2014-09-17 21:59:23 -0700
commit3f169bfe3c322bf4344e13276dbbe34279b59ad0 (patch)
treea7c125bf5511d7bc1756cad8b94f535813198edc
parent1147973f1c7713013c7c0ca414482b511a730475 (diff)
downloadspark-3f169bfe3c322bf4344e13276dbbe34279b59ad0.tar.gz
spark-3f169bfe3c322bf4344e13276dbbe34279b59ad0.tar.bz2
spark-3f169bfe3c322bf4344e13276dbbe34279b59ad0.zip
[SPARK-3565] Fix configuration item not consistent with document
https://issues.apache.org/jira/browse/SPARK-3565 — "spark.ports.maxRetries" should be "spark.port.maxRetries". Make the configuration keys in the documentation and code consistent. Author: WangTaoTheTonic <barneystinson@aliyun.com> Closes #2427 from WangTaoTheTonic/fixPortRetries and squashes the following commits: c178813 [WangTaoTheTonic] Use blank lines to trigger Jenkins; 646f3fe [WangTaoTheTonic] also in SparkBuild.scala; 3700dba [WangTaoTheTonic] Fix configuration item not consistent with document
-rw-r--r--core/src/main/scala/org/apache/spark/util/Utils.scala6
-rw-r--r--core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala2
-rw-r--r--docs/configuration.md2
-rw-r--r--project/SparkBuild.scala2
4 files changed, 7 insertions, 5 deletions
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index c76b7af184..ed06384432 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1382,15 +1382,15 @@ private[spark] object Utils extends Logging {
}
/**
- * Default number of retries in binding to a port.
+ * Default maximum number of retries when binding to a port before giving up.
*/
val portMaxRetries: Int = {
if (sys.props.contains("spark.testing")) {
// Set a higher number of retries for tests...
- sys.props.get("spark.ports.maxRetries").map(_.toInt).getOrElse(100)
+ sys.props.get("spark.port.maxRetries").map(_.toInt).getOrElse(100)
} else {
Option(SparkEnv.get)
- .flatMap(_.conf.getOption("spark.ports.maxRetries"))
+ .flatMap(_.conf.getOption("spark.port.maxRetries"))
.map(_.toInt)
.getOrElse(16)
}
diff --git a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
index 2a58c6a40d..3f1cd0752e 100644
--- a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
@@ -115,11 +115,13 @@ class JsonProtocolSuite extends FunSuite {
workerInfo.lastHeartbeat = JsonConstants.currTimeInMillis
workerInfo
}
+
def createExecutorRunner(): ExecutorRunner = {
new ExecutorRunner("appId", 123, createAppDesc(), 4, 1234, null, "workerId", "host",
new File("sparkHome"), new File("workDir"), "akka://worker",
new SparkConf, ExecutorState.RUNNING)
}
+
def createDriverRunner(): DriverRunner = {
new DriverRunner(new SparkConf(), "driverId", new File("workDir"), new File("sparkHome"),
createDriverDesc(), null, "akka://worker")
diff --git a/docs/configuration.md b/docs/configuration.md
index 99faf51c6f..a6dd7245e1 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -657,7 +657,7 @@ Apart from these, the following properties are also available, and may be useful
<td><code>spark.port.maxRetries</code></td>
<td>16</td>
<td>
- Maximum number of retries when binding to a port before giving up.
+ Default maximum number of retries when binding to a port before giving up.
</td>
</tr>
<tr>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index ab9f8ba120..12ac82293d 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -336,7 +336,7 @@ object TestSettings {
fork := true,
javaOptions in Test += "-Dspark.test.home=" + sparkHome,
javaOptions in Test += "-Dspark.testing=1",
- javaOptions in Test += "-Dspark.ports.maxRetries=100",
+ javaOptions in Test += "-Dspark.port.maxRetries=100",
javaOptions in Test += "-Dspark.ui.enabled=false",
javaOptions in Test += "-Dsun.io.serialization.extendedDebugInfo=true",
javaOptions in Test ++= System.getProperties.filter(_._1 startsWith "spark")