path: root/core
author    Karen Feng <karenfeng.us@gmail.com>    2013-07-22 09:53:26 -0700
committer Karen Feng <karenfeng.us@gmail.com>    2013-07-22 09:53:26 -0700
commit    636b19f8336e3e0c166f9a5bebdc8e71adff2dce (patch)
tree      7d30f7e2acc07baf939dd30b8a0db6d567935218 /core
parent    865dc63bac1cb72ecb84038c0a7cd708cc9e19d7 (diff)
parent    15fb3948330721aba4716fa80c75e9bafab1a1b6 (diff)
Merge branch 'master' of https://github.com/mesos/spark into ui-808
Diffstat (limited to 'core')
-rw-r--r--  core/src/hadoop1/scala/spark/deploy/SparkHadoopUtil.scala        3
-rw-r--r--  core/src/hadoop2/scala/spark/deploy/SparkHadoopUtil.scala        3
-rw-r--r--  core/src/main/scala/spark/SparkContext.scala                     7
-rw-r--r--  core/src/main/scala/spark/deploy/master/Master.scala             2
-rw-r--r--  core/src/main/scala/spark/deploy/master/MasterArguments.scala    5
-rw-r--r--  core/src/main/scala/spark/deploy/master/ui/MasterWebUI.scala     6
6 files changed, 19 insertions, 7 deletions
diff --git a/core/src/hadoop1/scala/spark/deploy/SparkHadoopUtil.scala b/core/src/hadoop1/scala/spark/deploy/SparkHadoopUtil.scala
index df55be1254..617954cb98 100644
--- a/core/src/hadoop1/scala/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/hadoop1/scala/spark/deploy/SparkHadoopUtil.scala
@@ -41,4 +41,7 @@ object SparkHadoopUtil {
// add any user credentials to the job conf which are necessary for running on a secure Hadoop cluster
def addCredentials(conf: JobConf) {}
+
+ def isYarnMode(): Boolean = { false }
+
}
diff --git a/core/src/hadoop2/scala/spark/deploy/SparkHadoopUtil.scala b/core/src/hadoop2/scala/spark/deploy/SparkHadoopUtil.scala
index df55be1254..617954cb98 100644
--- a/core/src/hadoop2/scala/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/hadoop2/scala/spark/deploy/SparkHadoopUtil.scala
@@ -41,4 +41,7 @@ object SparkHadoopUtil {
// add any user credentials to the job conf which are necessary for running on a secure Hadoop cluster
def addCredentials(conf: JobConf) {}
+
+ def isYarnMode(): Boolean = { false }
+
}
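
The two stubs above keep the non-YARN builds compiling: under the hadoop1 and hadoop2 profiles isYarnMode() simply reports false, so callers can branch on it without linking against any YARN classes. A minimal caller-side sketch (illustrative only, not part of this commit):

    import spark.deploy.SparkHadoopUtil

    if (SparkHadoopUtil.isYarnMode()) {
      // running under YARN: skip behaviour that assumes a driver-local filesystem
    } else {
      // standalone / local mode: proceed as before
    }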
diff --git a/core/src/main/scala/spark/SparkContext.scala b/core/src/main/scala/spark/SparkContext.scala
index 46b9935cb7..c01e315e35 100644
--- a/core/src/main/scala/spark/SparkContext.scala
+++ b/core/src/main/scala/spark/SparkContext.scala
@@ -577,7 +577,12 @@ class SparkContext(
} else {
val uri = new URI(path)
val key = uri.getScheme match {
- case null | "file" => env.httpFileServer.addJar(new File(uri.getPath))
+ case null | "file" =>
+ if (SparkHadoopUtil.isYarnMode()) {
+ logWarning("local jar specified as parameter to addJar under Yarn mode")
+ return
+ }
+ env.httpFileServer.addJar(new File(uri.getPath))
case _ => path
}
addedJars(key) = System.currentTimeMillis
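
With the change above, addJar short-circuits for local paths (null or "file" scheme) when running under YARN, since serving a driver-local jar over the HTTP file server is not meaningful there. A usage sketch, assuming the 2013-era two-argument SparkContext constructor (the master string and paths below are illustrative):

    val sc = new SparkContext("local", "addJar-example")
    sc.addJar("file:///tmp/extra.jar")                 // non-YARN: served via httpFileServer
    sc.addJar("hdfs://namenode:8020/jars/extra.jar")   // other schemes: registered by path
    // On a YARN build, the first call now only logs
    // "local jar specified as parameter to addJar under Yarn mode" and returns.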
diff --git a/core/src/main/scala/spark/deploy/master/Master.scala b/core/src/main/scala/spark/deploy/master/Master.scala
index e5a7a87e2e..eddcafd84d 100644
--- a/core/src/main/scala/spark/deploy/master/Master.scala
+++ b/core/src/main/scala/spark/deploy/master/Master.scala
@@ -53,7 +53,7 @@ private[spark] class Master(host: String, port: Int, webUiPort: Int) extends Act
var firstApp: Option[ApplicationInfo] = None
- val webUi = new MasterWebUI(self)
+ val webUi = new MasterWebUI(self, webUiPort)
Utils.checkHost(host, "Expected hostname")
diff --git a/core/src/main/scala/spark/deploy/master/MasterArguments.scala b/core/src/main/scala/spark/deploy/master/MasterArguments.scala
index d0ec3d5ea0..0ae0160767 100644
--- a/core/src/main/scala/spark/deploy/master/MasterArguments.scala
+++ b/core/src/main/scala/spark/deploy/master/MasterArguments.scala
@@ -38,7 +38,10 @@ private[spark] class MasterArguments(args: Array[String]) {
if (System.getenv("SPARK_MASTER_WEBUI_PORT") != null) {
webUiPort = System.getenv("SPARK_MASTER_WEBUI_PORT").toInt
}
-
+ if (System.getProperty("master.ui.port") != null) {
+ webUiPort = System.getProperty("master.ui.port").toInt
+ }
+
parse(args.toList)
def parse(args: List[String]): Unit = args match {
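
The net effect is a three-level precedence for the master web UI port: SPARK_MASTER_WEBUI_PORT is read first, the master.ui.port system property (checked afterwards) overrides it, and parse(args) runs last, so a port given on the command line wins. A condensed sketch of that ordering (the built-in default and the flag handling are assumed, not shown in this hunk):

    var webUiPort = 8080  // assumed built-in default
    sys.env.get("SPARK_MASTER_WEBUI_PORT").foreach(p => webUiPort = p.toInt)
    Option(System.getProperty("master.ui.port")).foreach(p => webUiPort = p.toInt)
    // parse(args) runs after both checks, so a command-line option overrides both sources.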
diff --git a/core/src/main/scala/spark/deploy/master/ui/MasterWebUI.scala b/core/src/main/scala/spark/deploy/master/ui/MasterWebUI.scala
index 04b32c7968..dabc2d8dc7 100644
--- a/core/src/main/scala/spark/deploy/master/ui/MasterWebUI.scala
+++ b/core/src/main/scala/spark/deploy/master/ui/MasterWebUI.scala
@@ -32,12 +32,11 @@ import spark.ui.JettyUtils._
* Web UI server for the standalone master.
*/
private[spark]
-class MasterWebUI(val master: ActorRef, requestedPort: Option[Int] = None) extends Logging {
+class MasterWebUI(val master: ActorRef, requestedPort: Int) extends Logging {
implicit val timeout = Duration.create(
System.getProperty("spark.akka.askTimeout", "10").toLong, "seconds")
val host = Utils.localHostName()
- val port = requestedPort.getOrElse(
- System.getProperty("master.ui.port", MasterWebUI.DEFAULT_PORT).toInt)
+ val port = requestedPort
var server: Option[Server] = None
var boundPort: Option[Int] = None
@@ -72,5 +71,4 @@ class MasterWebUI(val master: ActorRef, requestedPort: Option[Int] = None) extends Logging {
private[spark] object MasterWebUI {
val STATIC_RESOURCE_DIR = "spark/ui/static"
- val DEFAULT_PORT = "8080"
}
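
MasterWebUI no longer resolves its own port: the Option[Int] parameter and the DEFAULT_PORT constant are gone, and the caller must pass the port explicitly, as Master.scala now does. A construction sketch (the start() call is assumed from the surrounding web UI code, not shown in this diff):

    // Inside the Master actor, mirroring the change in Master.scala above:
    val webUi = new MasterWebUI(self, webUiPort)
    webUi.start()  // assumed: binds the Jetty server on the requested port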