aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--python/pyspark/shell.py8
-rw-r--r--repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala7
-rw-r--r--repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala3
3 files changed, 12 insertions, 6 deletions
diff --git a/python/pyspark/shell.py b/python/pyspark/shell.py
index 89cf76920e..4cf4b89ccf 100644
--- a/python/pyspark/shell.py
+++ b/python/pyspark/shell.py
@@ -35,9 +35,10 @@ import pyspark
from pyspark.context import SparkContext
from pyspark.storagelevel import StorageLevel
-# this is the equivalent of ADD_JARS
-add_files = (os.environ.get("ADD_FILES").split(',')
- if os.environ.get("ADD_FILES") is not None else None)
+# this is the deprecated equivalent of ADD_JARS
+add_files = None
+if os.environ.get("ADD_FILES") is not None:
+ add_files = os.environ.get("ADD_FILES").split(',')
if os.environ.get("SPARK_EXECUTOR_URI"):
SparkContext.setSystemProperty("spark.executor.uri", os.environ["SPARK_EXECUTOR_URI"])
@@ -59,6 +60,7 @@ print("Using Python version %s (%s, %s)" % (
print("SparkContext available as sc.")
if add_files is not None:
+ print("Warning: ADD_FILES environment variable is deprecated, use --py-files argument instead")
print("Adding files: [%s]" % ", ".join(add_files))
# The ./bin/pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index b4db3df795..8dc0e0c965 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -1064,15 +1064,16 @@ class SparkILoop(
private def main(settings: Settings): Unit = process(settings)
}
-object SparkILoop {
+object SparkILoop extends Logging {
implicit def loopToInterpreter(repl: SparkILoop): SparkIMain = repl.intp
private def echo(msg: String) = Console println msg
def getAddedJars: Array[String] = {
val envJars = sys.env.get("ADD_JARS")
- val propJars = sys.props.get("spark.jars").flatMap { p =>
- if (p == "") None else Some(p)
+ if (envJars.isDefined) {
+      logWarning("ADD_JARS environment variable is deprecated, use the --jars spark-submit argument instead")
}
+ val propJars = sys.props.get("spark.jars").flatMap { p => if (p == "") None else Some(p) }
val jars = propJars.orElse(envJars).getOrElse("")
Utils.resolveURIs(jars).split(",").filter(_.nonEmpty)
}
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
index dc25692749..2210fbaafe 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
@@ -51,6 +51,9 @@ object Main extends Logging {
def getAddedJars: Array[String] = {
val envJars = sys.env.get("ADD_JARS")
+ if (envJars.isDefined) {
+ logWarning("ADD_JARS environment variable is deprecated, use --jar spark submit argument instead")
+ }
val propJars = sys.props.get("spark.jars").flatMap { p => if (p == "") None else Some(p) }
val jars = propJars.orElse(envJars).getOrElse("")
Utils.resolveURIs(jars).split(",").filter(_.nonEmpty)