author     WangTaoTheTonic <barneystinson@aliyun.com>   2014-09-17 21:59:23 -0700
committer  Patrick Wendell <pwendell@gmail.com>         2014-09-17 21:59:41 -0700
commit     32f2222e915f31422089139944a077e2cbd442f9 (patch)
tree       1f1e6237580ba42ef9eb3e744a647e654819f1fb
parent     3f1f9744b176424e00d262256eba9bc721cef18b (diff)
[SPARK-3565] Fix configuration item not consistent with document
https://issues.apache.org/jira/browse/SPARK-3565

"spark.ports.maxRetries" should be "spark.port.maxRetries". Make the configuration keys in document and code consistent.

Author: WangTaoTheTonic <barneystinson@aliyun.com>

Closes #2427 from WangTaoTheTonic/fixPortRetries and squashes the following commits:

c178813 [WangTaoTheTonic] Use blank lines trigger Jenkins
646f3fe [WangTaoTheTonic] also in SparkBuild.scala
3700dba [WangTaoTheTonic] Fix configuration item not consistent with document

(cherry picked from commit 3f169bfe3c322bf4344e13276dbbe34279b59ad0)
Signed-off-by: Patrick Wendell <pwendell@gmail.com>
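With the keys consistent, a value set under the documented name is the one the port-binding code actually reads; before this fix, setting "spark.port.maxRetries" was silently ignored because the code looked up "spark.ports.maxRetries". A minimal usage sketch (not part of this commit; the object and application name below are placeholders):

    import org.apache.spark.SparkConf

    // Hypothetical example: set the corrected key before creating a SparkContext.
    object PortRetryExample {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf()
          .setAppName("port-retry-example")        // placeholder application name
          .set("spark.port.maxRetries", "32")      // raise the bind-retry limit from the default of 16
        println(conf.get("spark.port.maxRetries")) // prints "32"
      }
    }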
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Utils.scala                6
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala  2
-rw-r--r--  docs/configuration.md                                                 2
-rw-r--r--  project/SparkBuild.scala                                              2
4 files changed, 7 insertions(+), 5 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index c6eff9e455..12e69802f3 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1412,15 +1412,15 @@ private[spark] object Utils extends Logging {
   }
 
   /**
-   * Default number of retries in binding to a port.
+   * Default maximum number of retries when binding to a port before giving up.
    */
   val portMaxRetries: Int = {
     if (sys.props.contains("spark.testing")) {
       // Set a higher number of retries for tests...
-      sys.props.get("spark.ports.maxRetries").map(_.toInt).getOrElse(100)
+      sys.props.get("spark.port.maxRetries").map(_.toInt).getOrElse(100)
     } else {
       Option(SparkEnv.get)
-        .flatMap(_.conf.getOption("spark.ports.maxRetries"))
+        .flatMap(_.conf.getOption("spark.port.maxRetries"))
         .map(_.toInt)
         .getOrElse(16)
     }
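For context, portMaxRetries feeds Spark's port-binding retry logic. The sketch below is a simplified, hypothetical illustration of that pattern; the object, method name, and error handling are placeholders, not Spark's actual Utils.startServiceOnPort.

    import java.net.BindException

    // Simplified sketch: try successive ports until one binds or the retry budget is spent.
    object BindWithRetries {
      def startWithRetries[T](startPort: Int, maxRetries: Int)(bind: Int => T): (T, Int) = {
        for (offset <- 0 to maxRetries) {
          val tryPort = startPort + offset
          try {
            return (bind(tryPort), tryPort)   // success: return the service and the port it bound to
          } catch {
            case _: BindException => ()       // port in use: fall through and try the next one
          }
        }
        throw new BindException(s"Could not bind on ports $startPort-${startPort + maxRetries}")
      }
    }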
diff --git a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
index 31aa7ec837..a923d14b69 100644
--- a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
@@ -115,11 +115,13 @@ class JsonProtocolSuite extends FunSuite {
     workerInfo.lastHeartbeat = JsonConstants.currTimeInMillis
     workerInfo
   }
+
   def createExecutorRunner(): ExecutorRunner = {
     new ExecutorRunner("appId", 123, createAppDesc(), 4, 1234, null, "workerId", "host",
       new File("sparkHome"), new File("workDir"), "akka://worker",
       new SparkConf, ExecutorState.RUNNING)
   }
+
   def createDriverRunner(): DriverRunner = {
     new DriverRunner("driverId", new File("workDir"), new File("sparkHome"), createDriverDesc(),
       null, "akka://worker")
diff --git a/docs/configuration.md b/docs/configuration.md
index 65a422caab..9411230b0e 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -648,7 +648,7 @@ Apart from these, the following properties are also available, and may be useful
   <td><code>spark.port.maxRetries</code></td>
   <td>16</td>
   <td>
-    Maximum number of retries when binding to a port before giving up.
+    Default maximum number of retries when binding to a port before giving up.
   </td>
 </tr>
 <tr>
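The documented value of 16 is only a fallback; the property can be raised per application. An illustrative sketch of resolving the effective value the same way the Utils.scala change above does (object and variable names are placeholders):

    import org.apache.spark.SparkConf

    // Illustrative only: read spark.port.maxRetries from a SparkConf, defaulting to 16 when unset.
    object ResolveMaxRetries {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf(false).set("spark.port.maxRetries", "32")
        val maxRetries = conf.getOption("spark.port.maxRetries").map(_.toInt).getOrElse(16)
        println(maxRetries) // 32 here; 16 if the key were not set
      }
    }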
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index c968a753c3..60603cd50c 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -336,7 +336,7 @@ object TestSettings {
     fork := true,
     javaOptions in Test += "-Dspark.test.home=" + sparkHome,
     javaOptions in Test += "-Dspark.testing=1",
-    javaOptions in Test += "-Dspark.ports.maxRetries=100",
+    javaOptions in Test += "-Dspark.port.maxRetries=100",
     javaOptions in Test += "-Dspark.ui.enabled=false",
     javaOptions in Test += "-Dsun.io.serialization.extendedDebugInfo=true",
     javaOptions in Test ++= System.getProperties.filter(_._1 startsWith "spark")