diff options
author | Dmitriy Lyubimov <dlyubimov@apache.org> | 2013-07-31 22:09:22 -0700 |
---|---|---|
committer | Dmitriy Lyubimov <dlyubimov@apache.org> | 2013-07-31 22:09:22 -0700 |
commit | cb6be5bd7eab8b3cf682a6d0347b87d216d43939 (patch) | |
tree | 5071c649b24b37e5669571981da8100aaec3aaf8 /core/src/main/scala/spark/scheduler/mesos/MesosSchedulerBackend.scala | |
parent | 28f1550f0134bad1391682135b9bfc43cb19fc01 (diff) | |
parent | 3097d75d6f5b93cac851dd6f43faed5a492b2676 (diff) | |
download | spark-cb6be5bd7eab8b3cf682a6d0347b87d216d43939.tar.gz spark-cb6be5bd7eab8b3cf682a6d0347b87d216d43939.tar.bz2 spark-cb6be5bd7eab8b3cf682a6d0347b87d216d43939.zip |
Merge remote-tracking branch 'mesos/master' into SPARK-826
Conflicts:
core/src/main/scala/spark/scheduler/cluster/ClusterTaskSetManager.scala
core/src/main/scala/spark/scheduler/local/LocalTaskSetManager.scala
core/src/test/scala/spark/KryoSerializerSuite.scala
Diffstat (limited to 'core/src/main/scala/spark/scheduler/mesos/MesosSchedulerBackend.scala')
-rw-r--r-- | core/src/main/scala/spark/scheduler/mesos/MesosSchedulerBackend.scala | 17 |
1 file changed, 12 insertions, 5 deletions
diff --git a/core/src/main/scala/spark/scheduler/mesos/MesosSchedulerBackend.scala b/core/src/main/scala/spark/scheduler/mesos/MesosSchedulerBackend.scala index 75b8268b55..f6069a5775 100644 --- a/core/src/main/scala/spark/scheduler/mesos/MesosSchedulerBackend.scala +++ b/core/src/main/scala/spark/scheduler/mesos/MesosSchedulerBackend.scala @@ -89,7 +89,6 @@ private[spark] class MesosSchedulerBackend( val sparkHome = sc.getSparkHome().getOrElse(throw new SparkException( "Spark home is not set; set it through the spark.home system " + "property, the SPARK_HOME environment variable or the SparkContext constructor")) - val execScript = new File(sparkHome, "spark-executor").getCanonicalPath val environment = Environment.newBuilder() sc.executorEnvs.foreach { case (key, value) => environment.addVariables(Environment.Variable.newBuilder() @@ -97,15 +96,23 @@ private[spark] class MesosSchedulerBackend( .setValue(value) .build()) } + val command = CommandInfo.newBuilder() + .setEnvironment(environment) + val uri = System.getProperty("spark.executor.uri") + if (uri == null) { + command.setValue(new File(sparkHome, "spark-executor").getCanonicalPath) + } else { + // Grab everything to the first '.'. We'll use that and '*' to + // glob the directory "correctly". + val basename = uri.split('/').last.split('.').head + command.setValue("cd %s*; ./spark-executor".format(basename)) + command.addUris(CommandInfo.URI.newBuilder().setValue(uri)) + } val memory = Resource.newBuilder() .setName("mem") .setType(Value.Type.SCALAR) .setScalar(Value.Scalar.newBuilder().setValue(executorMemory).build()) .build() - val command = CommandInfo.newBuilder() - .setValue(execScript) - .setEnvironment(environment) - .build() ExecutorInfo.newBuilder() .setExecutorId(ExecutorID.newBuilder().setValue(execId).build()) .setCommand(command) |