path: root/core/src
author    Aaron Davidson <aaron@databricks.com>    2015-05-08 17:13:55 -0700
committer Andrew Or <andrew@databricks.com>        2015-05-08 17:13:55 -0700
commit    ffdc40ce7a799f2564f57b958d0f32f1d1636488 (patch)
tree      854316c85a71eecfedf3d44bd450123da9884d42 /core/src
parent    1c78f6866ebbcfb41d9875bfa3c0b9fa23b188bf (diff)
download  spark-ffdc40ce7a799f2564f57b958d0f32f1d1636488.tar.gz
          spark-ffdc40ce7a799f2564f57b958d0f32f1d1636488.tar.bz2
          spark-ffdc40ce7a799f2564f57b958d0f32f1d1636488.zip
[SPARK-6955] Perform port retries at NettyBlockTransferService level
Currently we perform port retries at the TransportServer level, but this is not specified by the TransportContext API and it has further-reaching impacts, such as causing undesirable behavior for the YARN and Standalone shuffle services.

Author: Aaron Davidson <aaron@databricks.com>

Closes #5575 from aarondav/port-bind and squashes the following commits:

3c2d6ed [Aaron Davidson] Oops, never do it.
a5d9432 [Aaron Davidson] Remove shouldHostShuffleServiceIfEnabled
e901eb2 [Aaron Davidson] fix local-cluster mode for ExternalShuffleServiceSuite
59e5e38 [Aaron Davidson] [SPARK-6955] Perform port retries at NettyBlockTransferService level
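At a high level, the patch moves the retry loop out of TransportServer and into NettyBlockTransferService.createServer, which delegates to Utils.startServiceOnPort (see the diff below). The following is a minimal illustrative sketch of that retry idea only, assuming a start function that returns the bound service and its port; it is not Spark's actual Utils.startServiceOnPort, which additionally handles random ports (port 0), port-range wrap-around, and a configurable retry budget.

import java.net.BindException

// Illustrative sketch of the port-retry idea (not Spark's real helper):
// try the requested port and, on a bind failure, retry on successive ports.
def startWithRetries[T](startPort: Int, maxRetries: Int)(start: Int => (T, Int)): (T, Int) = {
  var attempt = 0
  while (attempt <= maxRetries) {
    // Port 0 asks the OS for a random free port, so there is nothing to increment.
    val candidate = if (startPort == 0) 0 else startPort + attempt
    try {
      return start(candidate)                // success: hand back the service and its bound port
    } catch {
      case _: BindException => attempt += 1  // port already taken: try the next candidate
    }
  }
  throw new BindException(s"Could not bind starting at port $startPort after $maxRetries retries")
}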
Diffstat (limited to 'core/src')
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala                    |  4
-rw-r--r--  core/src/main/scala/org/apache/spark/network/netty/NettyBlockTransferService.scala     | 14
-rw-r--r--  core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala | 78
3 files changed, 93 insertions(+), 3 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala b/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
index f0e77c2ba9..860e1a2490 100644
--- a/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
@@ -48,7 +48,9 @@ class LocalSparkCluster(
logInfo("Starting a local Spark cluster with " + numWorkers + " workers.")
// Disable REST server on Master in this mode unless otherwise specified
- val _conf = conf.clone().setIfMissing("spark.master.rest.enabled", "false")
+ val _conf = conf.clone()
+ .setIfMissing("spark.master.rest.enabled", "false")
+ .set("spark.shuffle.service.enabled", "false")
/* Start the Master */
val (masterSystem, masterPort, _, _) = Master.startSystemAndActor(localHostname, 0, 0, _conf)
diff --git a/core/src/main/scala/org/apache/spark/network/netty/NettyBlockTransferService.scala b/core/src/main/scala/org/apache/spark/network/netty/NettyBlockTransferService.scala
index 6181c0ee9f..d650d5fe73 100644
--- a/core/src/main/scala/org/apache/spark/network/netty/NettyBlockTransferService.scala
+++ b/core/src/main/scala/org/apache/spark/network/netty/NettyBlockTransferService.scala
@@ -59,12 +59,22 @@ class NettyBlockTransferService(conf: SparkConf, securityManager: SecurityManage
}
transportContext = new TransportContext(transportConf, rpcHandler)
clientFactory = transportContext.createClientFactory(clientBootstrap.toList)
- server = transportContext.createServer(conf.getInt("spark.blockManager.port", 0),
- serverBootstrap.toList)
+ server = createServer(serverBootstrap.toList)
appId = conf.getAppId
logInfo("Server created on " + server.getPort)
}
+ /** Creates and binds the TransportServer, possibly trying multiple ports. */
+ private def createServer(bootstraps: List[TransportServerBootstrap]): TransportServer = {
+ def startService(port: Int): (TransportServer, Int) = {
+ val server = transportContext.createServer(port, bootstraps)
+ (server, server.getPort)
+ }
+
+ val portToTry = conf.getInt("spark.blockManager.port", 0)
+ Utils.startServiceOnPort(portToTry, startService, conf, getClass.getName)._1
+ }
+
override def fetchBlocks(
host: String,
port: Int,
diff --git a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala
new file mode 100644
index 0000000000..a41f8b7ce5
--- /dev/null
+++ b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.network.netty
+
+import org.apache.spark.network.BlockDataManager
+import org.apache.spark.{SecurityManager, SparkConf}
+import org.mockito.Mockito.mock
+import org.scalatest._
+
+class NettyBlockTransferServiceSuite extends FunSuite with BeforeAndAfterEach with ShouldMatchers {
+ private var service0: NettyBlockTransferService = _
+ private var service1: NettyBlockTransferService = _
+
+ override def afterEach() {
+ if (service0 != null) {
+ service0.close()
+ service0 = null
+ }
+
+ if (service1 != null) {
+ service1.close()
+ service1 = null
+ }
+ }
+
+ test("can bind to a random port") {
+ service0 = createService(port = 0)
+ service0.port should not be 0
+ }
+
+ test("can bind to two random ports") {
+ service0 = createService(port = 0)
+ service1 = createService(port = 0)
+ service0.port should not be service1.port
+ }
+
+ test("can bind to a specific port") {
+ val port = 17634
+ service0 = createService(port)
+ service0.port should be >= port
+ service0.port should be <= (port + 10) // avoid testing equality in case of simultaneous tests
+ }
+
+ test("can bind to a specific port twice and the second increments") {
+ val port = 17634
+ service0 = createService(port)
+ service1 = createService(port)
+ service0.port should be >= port
+ service0.port should be <= (port + 10)
+ service1.port should be (service0.port + 1)
+ }
+
+ private def createService(port: Int): NettyBlockTransferService = {
+ val conf = new SparkConf()
+ .set("spark.app.id", s"test-${getClass.getName}")
+ .set("spark.blockManager.port", port.toString)
+ val securityManager = new SecurityManager(conf)
+ val blockDataManager = mock(classOf[BlockDataManager])
+ val service = new NettyBlockTransferService(conf, securityManager, numCores = 1)
+ service.init(blockDataManager)
+ service
+ }
+}
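As a closing usage note (an assumption about the surrounding helper, not something this patch changes): the number of consecutive ports that Utils.startServiceOnPort will try, and therefore how far the bound port in the tests above may drift from the requested one, is bounded by a retry budget commonly tuned via spark.port.maxRetries. A hedged configuration sketch:

import org.apache.spark.SparkConf

// Illustration only: request a fixed block manager port but allow more bind
// retries, assuming spark.port.maxRetries is the knob read by the retry helper.
val conf = new SparkConf()
  .set("spark.blockManager.port", "17634")
  .set("spark.port.maxRetries", "32")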