Diffstat (limited to 'repl/scala-2.11/src')
-rw-r--r--  repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala      | 32
-rw-r--r--  repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala |  4
2 files changed, 9 insertions(+), 27 deletions(-)
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
index 999e7ad3cc..a58f4234da 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
@@ -27,6 +27,8 @@ import org.apache.spark.sql.SQLContext
object Main extends Logging {
+ initializeLogIfNecessary(true)
+
val conf = new SparkConf()
val rootDir = conf.getOption("spark.repl.classdir").getOrElse(Utils.getLocalDir(conf))
val outputDir = Utils.createTempDir(root = rootDir, namePrefix = "repl")
@@ -50,39 +52,27 @@ object Main extends Logging {
// Visible for testing
private[repl] def doMain(args: Array[String], _interp: SparkILoop): Unit = {
interp = _interp
+ val jars = conf.getOption("spark.jars")
+ .map(_.replace(",", File.pathSeparator))
+ .getOrElse("")
val interpArguments = List(
"-Yrepl-class-based",
"-Yrepl-outdir", s"${outputDir.getAbsolutePath}",
- "-classpath", getAddedJars.mkString(File.pathSeparator)
+ "-classpath", jars
) ++ args.toList
val settings = new GenericRunnerSettings(scalaOptionError)
settings.processArguments(interpArguments, true)
if (!hasErrors) {
- if (getMaster == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
interp.process(settings) // the REPL starts and runs its read-eval-print loop
Option(sparkContext).map(_.stop)
}
}
- def getAddedJars: Array[String] = {
- val envJars = sys.env.get("ADD_JARS")
- if (envJars.isDefined) {
- logWarning("ADD_JARS environment variable is deprecated, use --jar spark submit argument instead")
- }
- val propJars = sys.props.get("spark.jars").flatMap { p => if (p == "") None else Some(p) }
- val jars = propJars.orElse(envJars).getOrElse("")
- Utils.resolveURIs(jars).split(",").filter(_.nonEmpty)
- }
-
def createSparkContext(): SparkContext = {
val execUri = System.getenv("SPARK_EXECUTOR_URI")
- val jars = getAddedJars
- val conf = new SparkConf()
- .setMaster(getMaster)
- .setJars(jars)
- .setIfMissing("spark.app.name", "Spark shell")
+ conf.setIfMissing("spark.app.name", "Spark shell")
// SparkContext will detect this configuration and register it with the RpcEnv's
// file server, setting spark.repl.class.uri to the actual URI for executors to
// use. This is sort of ugly but since executors are started as part of SparkContext
@@ -115,12 +105,4 @@ object Main extends Logging {
sqlContext
}
- private def getMaster: String = {
- val master = {
- val envMaster = sys.env.get("MASTER")
- val propMaster = sys.props.get("spark.master")
- propMaster.orElse(envMaster).getOrElse("local[*]")
- }
- master
- }
}
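
The core of this change is the jar handling in doMain: instead of resolving jars from the deprecated ADD_JARS environment variable or from system properties via getAddedJars, the REPL now reads spark.jars directly from the shared SparkConf and rewrites the comma-separated list into a classpath string. A minimal, self-contained sketch of that conversion (the jar paths and object name are hypothetical, not part of the patch):

import java.io.File

object JarsToClasspathSketch {
  def main(args: Array[String]): Unit = {
    // Stand-in for conf.getOption("spark.jars"); the paths are made up.
    val sparkJars = Option("/tmp/a.jar,/tmp/b.jar")
    // "spark.jars" is comma-separated, while scalac's -classpath expects
    // entries joined by File.pathSeparator (":" on Unix, ";" on Windows).
    val classpath = sparkJars
      .map(_.replace(",", File.pathSeparator))
      .getOrElse("")
    println(classpath) // on Unix prints: /tmp/a.jar:/tmp/b.jar
  }
}
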
diff --git a/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 239096be79..6bee880640 100644
--- a/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -48,8 +48,8 @@ class ReplSuite extends SparkFunSuite {
val oldExecutorClasspath = System.getProperty(CONF_EXECUTOR_CLASSPATH)
System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
- System.setProperty("spark.master", master)
- Main.doMain(Array("-classpath", classpath), new SparkILoop(in, new PrintWriter(out)))
+ Main.conf.set("spark.master", master)
+ Main.doMain(Array("-classpath", classpath), new SparkILoop(in, new PrintWriter(out)))
if (oldExecutorClasspath != null) {
System.setProperty(CONF_EXECUTOR_CLASSPATH, oldExecutorClasspath)
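
The test-side change mirrors this: the master is now set on the shared Main.conf rather than through a JVM-wide system property. A SparkConf built with default loading snapshots spark.* system properties when it is constructed, so a property set after Main.conf has been initialized would never reach the REPL's conf. A minimal sketch of that behavior (a hypothetical standalone example, not code from the patch):

import org.apache.spark.SparkConf

object ConfVsSysPropSketch {
  def main(args: Array[String]): Unit = {
    // Loads spark.* system properties at construction time,
    // just as Main.conf does when object Main is initialized.
    val conf = new SparkConf()
    // Setting the property afterwards is invisible to `conf`.
    System.setProperty("spark.master", "local[2]")
    println(conf.contains("spark.master")) // false (assuming a clean environment)
    // Setting the conf directly, as the updated test does, always takes effect.
    conf.set("spark.master", "local[2]")
    println(conf.get("spark.master")) // local[2]
  }
}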