author    Marcelo Vanzin <vanzin@cloudera.com>  2016-03-14 14:27:33 -0700
committer Marcelo Vanzin <vanzin@cloudera.com>  2016-03-14 14:27:33 -0700
commit    8301fadd8d269da11e72870b7a889596e3337839 (patch)
tree      74c42c6c99d0438f3669acdae8982f3632259254 /repl/scala-2.11/src
parent    38529d8f2350feb1f143aab0be336050c0f887f2 (diff)
[SPARK-13626][CORE] Avoid duplicate config deprecation warnings.
Three different things were needed to get rid of spurious warnings:

- silence deprecation warnings when cloning configuration
- change the way SparkHadoopUtil instantiates SparkConf to silence warnings
- avoid creating new SparkConf instances where it's not needed

On top of that, I changed the way that Logging.scala detects the repl; now it uses a method that is overridden in the repl's Main class, and the hack in Utils.scala is not needed anymore. This makes the 2.11 repl behave like the 2.10 one and set the default log level to WARN, which is a lot better. Previously, this wasn't working because the 2.11 repl triggers log initialization earlier than the 2.10 one.

I also removed and simplified some other code in the 2.11 repl's Main to avoid replicating logic that already exists elsewhere in Spark.

Tested the 2.11 repl in local and yarn modes.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #11510 from vanzin/SPARK-13626.
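The commit message describes the repl-detection change only in prose, so the following is a minimal, self-contained Scala sketch of the pattern it refers to: a logging trait exposes an initialization hook plus an interpreter-detection method, and the repl's Main object overrides and invokes them from its constructor so the default log level can drop to WARN inside the shell. The names here (SketchLogging, ReplMainSketch, isInterpreter) are illustrative assumptions, not Spark's actual Logging API; only the eager initializeLogIfNecessary(true) call mirrors the diff below.

    // Minimal sketch, not Spark's real Logging implementation.
    trait SketchLogging {
      // Hypothetical hook; the real Spark trait differs in naming and details.
      protected def isInterpreter: Boolean = false

      @volatile private var initialized = false

      protected def initializeLogIfNecessary(interpreter: Boolean): Unit = {
        if (!initialized) {
          initialized = true
          val level = if (interpreter || isInterpreter) "WARN" else "INFO"
          // A real implementation would configure the logging backend here,
          // guarded so it only runs once.
          println(s"Setting default log level to $level")
        }
      }
    }

    object ReplMainSketch extends SketchLogging {
      override protected def isInterpreter: Boolean = true

      // Runs in the object constructor, i.e. as soon as Main is first touched,
      // mirroring the `initializeLogIfNecessary(true)` call added in this commit.
      initializeLogIfNecessary(true)
    }

The point of the design is that log initialization happens before any other code in Main references the logger, which is why the call sits at the top of the object body rather than inside createSparkContext().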
Diffstat (limited to 'repl/scala-2.11/src')
-rw-r--r--  repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala       32
-rw-r--r--  repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala    4
2 files changed, 9 insertions(+), 27 deletions(-)
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
index 999e7ad3cc..a58f4234da 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
@@ -27,6 +27,8 @@ import org.apache.spark.sql.SQLContext
object Main extends Logging {
+ initializeLogIfNecessary(true)
+
val conf = new SparkConf()
val rootDir = conf.getOption("spark.repl.classdir").getOrElse(Utils.getLocalDir(conf))
val outputDir = Utils.createTempDir(root = rootDir, namePrefix = "repl")
@@ -50,39 +52,27 @@ object Main extends Logging {
// Visible for testing
private[repl] def doMain(args: Array[String], _interp: SparkILoop): Unit = {
interp = _interp
+ val jars = conf.getOption("spark.jars")
+ .map(_.replace(",", File.pathSeparator))
+ .getOrElse("")
val interpArguments = List(
"-Yrepl-class-based",
"-Yrepl-outdir", s"${outputDir.getAbsolutePath}",
- "-classpath", getAddedJars.mkString(File.pathSeparator)
+ "-classpath", jars
) ++ args.toList
val settings = new GenericRunnerSettings(scalaOptionError)
settings.processArguments(interpArguments, true)
if (!hasErrors) {
- if (getMaster == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
interp.process(settings) // Repl starts and goes in loop of R.E.P.L
Option(sparkContext).map(_.stop)
}
}
- def getAddedJars: Array[String] = {
- val envJars = sys.env.get("ADD_JARS")
- if (envJars.isDefined) {
- logWarning("ADD_JARS environment variable is deprecated, use --jar spark submit argument instead")
- }
- val propJars = sys.props.get("spark.jars").flatMap { p => if (p == "") None else Some(p) }
- val jars = propJars.orElse(envJars).getOrElse("")
- Utils.resolveURIs(jars).split(",").filter(_.nonEmpty)
- }
-
def createSparkContext(): SparkContext = {
val execUri = System.getenv("SPARK_EXECUTOR_URI")
- val jars = getAddedJars
- val conf = new SparkConf()
- .setMaster(getMaster)
- .setJars(jars)
- .setIfMissing("spark.app.name", "Spark shell")
+ conf.setIfMissing("spark.app.name", "Spark shell")
// SparkContext will detect this configuration and register it with the RpcEnv's
// file server, setting spark.repl.class.uri to the actual URI for executors to
// use. This is sort of ugly but since executors are started as part of SparkContext
@@ -115,12 +105,4 @@ object Main extends Logging {
sqlContext
}
- private def getMaster: String = {
- val master = {
- val envMaster = sys.env.get("MASTER")
- val propMaster = sys.props.get("spark.master")
- propMaster.orElse(envMaster).getOrElse("local[*]")
- }
- master
- }
}
diff --git a/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 239096be79..6bee880640 100644
--- a/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -48,8 +48,8 @@ class ReplSuite extends SparkFunSuite {
val oldExecutorClasspath = System.getProperty(CONF_EXECUTOR_CLASSPATH)
System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
- System.setProperty("spark.master", master)
- Main.doMain(Array("-classpath", classpath), new SparkILoop(in, new PrintWriter(out)))
+ Main.conf.set("spark.master", master)
+ Main.doMain(Array("-classpath", classpath), new SparkILoop(in, new PrintWriter(out)))
if (oldExecutorClasspath != null) {
System.setProperty(CONF_EXECUTOR_CLASSPATH, oldExecutorClasspath)