about summary refs log tree commit diff
path: root/repl/scala-2.11
diff options
context:
space:
mode:
Diffstat (limited to 'repl/scala-2.11')
-rw-r--r--repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala23
1 file changed, 12 insertions(+), 11 deletions(-)
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
index 455a6b9a93..44650f25f7 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
@@ -28,11 +28,13 @@ import org.apache.spark.sql.SQLContext
object Main extends Logging {
val conf = new SparkConf()
- val tmp = System.getProperty("java.io.tmpdir")
- val rootDir = conf.get("spark.repl.classdir", tmp)
- val outputDir = Utils.createTempDir(rootDir)
+ val rootDir = conf.getOption("spark.repl.classdir").getOrElse(Utils.getLocalDir(conf))
+ val outputDir = Utils.createTempDir(root = rootDir, namePrefix = "repl")
+ val s = new Settings()
+ s.processArguments(List("-Yrepl-class-based",
+ "-Yrepl-outdir", s"${outputDir.getAbsolutePath}",
+ "-classpath", getAddedJars.mkString(File.pathSeparator)), true)
// the creation of SecurityManager has to be lazy so SPARK_YARN_MODE is set if needed
- lazy val classServer = new HttpServer(conf, outputDir, new SecurityManager(conf))
var sparkContext: SparkContext = _
var sqlContext: SQLContext = _
var interp = new SparkILoop // this is a public var because tests reset it.
@@ -45,7 +47,6 @@ object Main extends Logging {
}
def main(args: Array[String]) {
-
val interpArguments = List(
"-Yrepl-class-based",
"-Yrepl-outdir", s"${outputDir.getAbsolutePath}",
@@ -57,11 +58,7 @@ object Main extends Logging {
if (!hasErrors) {
if (getMaster == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
- // Start the classServer and store its URI in a spark system property
- // (which will be passed to executors so that they can connect to it)
- classServer.start()
interp.process(settings) // Repl starts and goes in loop of R.E.P.L
- classServer.stop()
Option(sparkContext).map(_.stop)
}
}
@@ -82,9 +79,13 @@ object Main extends Logging {
val conf = new SparkConf()
.setMaster(getMaster)
.setJars(jars)
- .set("spark.repl.class.uri", classServer.uri)
.setIfMissing("spark.app.name", "Spark shell")
- logInfo("Spark class server started at " + classServer.uri)
+ // SparkContext will detect this configuration and register it with the RpcEnv's
+ // file server, setting spark.repl.class.uri to the actual URI for executors to
+ // use. This is sort of ugly but since executors are started as part of SparkContext
+ // initialization in certain cases, there's an initialization order issue that prevents
+ // this from being set after SparkContext is instantiated.
+ .set("spark.repl.class.outputDir", outputDir.getAbsolutePath())
if (execUri != null) {
conf.set("spark.executor.uri", execUri)
}