author     AbhishekKr <abhikumar163@gmail.com>    2014-04-16 19:05:40 -0700
committer  Reynold Xin <rxin@apache.org>          2014-04-16 19:10:09 -0700
commit     b3ad707c4411c3860691c9eb802fec425bf29e85 (patch)
tree       69fe7fc456513117b12b9df548f4d6d867a68bb8 /python
parent     13fb4c782621f1d2025f1cc99d7c4ddd9946961a (diff)
[python alternative] pyspark requires Python 2, failing from shell.py if the system default is Python 3
Python alternative for https://github.com/apache/spark/pull/392; managed from shell.py

Author: AbhishekKr <abhikumar163@gmail.com>

Closes #399 from abhishekkr/pyspark_shell and squashes the following commits:

134bdc9 [AbhishekKr] pyspark require Python2, failing if system default is Py3 from shell.py

(cherry picked from commit bb76eae1b50e4bf18360220110f7d0a4bee672ec)
Signed-off-by: Reynold Xin <rxin@apache.org>
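For illustration, a minimal standalone sketch of the guard pattern this patch adds to shell.py (EXPECTED_MAJOR is an illustrative name, not part of the patch). Note that the messages use the function-call form of print, which parses under both Python 2 and Python 3, so the check can run and report the problem rather than dying on a syntax error:

    import sys

    # Sketch of the version guard shell.py gains at import time: refuse to start
    # unless the interpreter running this PYTHONSTARTUP script is Python 2.x.
    EXPECTED_MAJOR = 2  # illustrative constant; the patch hard-codes 2

    if sys.version_info[0] != EXPECTED_MAJOR:
        print("Error: Default Python used is Python%s" % sys.version_info[0])
        print("\tSet env variable PYSPARK_PYTHON to Python2 binary and re-run it.")
        sys.exit(1)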
Diffstat (limited to 'python')
-rw-r--r--  python/pyspark/shell.py  20
1 file changed, 14 insertions, 6 deletions
diff --git a/python/pyspark/shell.py b/python/pyspark/shell.py
index 61613dbed8..e8ba050655 100644
--- a/python/pyspark/shell.py
+++ b/python/pyspark/shell.py
@@ -20,6 +20,14 @@ An interactive shell.
 This file is designed to be launched as a PYTHONSTARTUP script.
 """
+
+import sys
+if sys.version_info.major != 2:
+    print("Error: Default Python used is Python%s" % sys.version_info.major)
+    print("\tSet env variable PYSPARK_PYTHON to Python2 binary and re-run it.")
+    sys.exit(1)
+
+
 import os
 import platform
 import pyspark
@@ -34,21 +42,21 @@ if os.environ.get("SPARK_EXECUTOR_URI"):
 sc = SparkContext(os.environ.get("MASTER", "local[*]"), "PySparkShell", pyFiles=add_files)
-print """Welcome to
+print("""Welcome to
       ____              __
      / __/__  ___ _____/ /__
     _\ \/ _ \/ _ `/ __/  '_/
    /__ / .__/\_,_/_/ /_/\_\   version 1.0.0-SNAPSHOT
       /_/
-"""
-print "Using Python version %s (%s, %s)" % (
+""")
+print("Using Python version %s (%s, %s)" % (
     platform.python_version(),
     platform.python_build()[0],
-    platform.python_build()[1])
-print "Spark context available as sc."
+    platform.python_build()[1]))
+print("Spark context available as sc.")
 if add_files != None:
-    print "Adding files: [%s]" % ", ".join(add_files)
+    print("Adding files: [%s]" % ", ".join(add_files))
 # The ./bin/pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
 # which allows us to execute the user's PYTHONSTARTUP file:
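For context on the comment above: the hand-off is typically done by reading OLD_PYTHONSTARTUP back out of the environment and executing the user's original startup file. A rough Python 2 sketch of that pattern (only the OLD_PYTHONSTARTUP name comes from the comment; the rest is illustrative, not the code from this file):

    import os

    # Sketch: once Spark's own shell setup has run, chain to the user's original
    # PYTHONSTARTUP script if ./bin/pyspark stashed its path in OLD_PYTHONSTARTUP.
    _old_startup = os.environ.get("OLD_PYTHONSTARTUP")
    if _old_startup and os.path.isfile(_old_startup):
        execfile(_old_startup)  # Python 2 built-in, consistent with the guard above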