author     Jeff Zhang <zjffdu@apache.org>        2016-01-26 14:58:39 -0800
committer  Reynold Xin <rxin@databricks.com>     2016-01-26 14:58:39 -0800
commit     19fdb21afbf0eae4483cf6d4ef32daffd1994b89 (patch)
tree       3dff6c5747b6b1b3070fe0e35cfb6d69e3a76dfe /python
parent     83507fea9f45c336d73dd4795b8cb37bcd63e31d (diff)
[SPARK-12993][PYSPARK] Remove usage of ADD_FILES in pyspark
The ADD_FILES environment variable was introduced to add Python files to the SparkContext so they would be distributed to executors (SPARK-865); it is now deprecated. Users are encouraged to use --py-files to add Python files instead.

Author: Jeff Zhang <zjffdu@apache.org>

Closes #10913 from zjffdu/SPARK-12993.
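For context, the replacement the commit message points to is the --py-files flag (or, on a running SparkContext, the addPyFile method). A minimal sketch of the recommended usage, where deps.zip is a hypothetical archive of Python dependencies:

    $ ./bin/pyspark --py-files deps.zip
    $ ./bin/spark-submit --py-files deps.zip my_job.py

    # or programmatically, e.g. from inside the shell:
    >>> sc.addPyFile("deps.zip")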
Diffstat (limited to 'python')
-rw-r--r--  python/pyspark/shell.py | 11
1 file changed, 1 insertion(+), 10 deletions(-)
diff --git a/python/pyspark/shell.py b/python/pyspark/shell.py
index 26cafca8b8..7c37f75193 100644
--- a/python/pyspark/shell.py
+++ b/python/pyspark/shell.py
@@ -32,15 +32,10 @@ from pyspark.context import SparkContext
 from pyspark.sql import SQLContext, HiveContext
 from pyspark.storagelevel import StorageLevel
 
-# this is the deprecated equivalent of ADD_JARS
-add_files = None
-if os.environ.get("ADD_FILES") is not None:
-    add_files = os.environ.get("ADD_FILES").split(',')
-
 if os.environ.get("SPARK_EXECUTOR_URI"):
     SparkContext.setSystemProperty("spark.executor.uri", os.environ["SPARK_EXECUTOR_URI"])
 
-sc = SparkContext(pyFiles=add_files)
+sc = SparkContext()
 atexit.register(lambda: sc.stop())
 
 try:
@@ -68,10 +63,6 @@ print("Using Python version %s (%s, %s)" % (
     platform.python_build()[1]))
 print("SparkContext available as sc, %s available as sqlContext." % sqlContext.__class__.__name__)
 
-if add_files is not None:
-    print("Warning: ADD_FILES environment variable is deprecated, use --py-files argument instead")
-    print("Adding files: [%s]" % ", ".join(add_files))
-
 # The ./bin/pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
 # which allows us to execute the user's PYTHONSTARTUP file:
 _pythonstartup = os.environ.get('OLD_PYTHONSTARTUP')