about | summary | refs | log | tree | commit | diff
path: root/python
diff options
context:
space:
mode:
authorAbhishekKr <abhikumar163@gmail.com>2014-04-16 19:05:40 -0700
committerReynold Xin <rxin@apache.org>2014-04-16 19:05:40 -0700
commitbb76eae1b50e4bf18360220110f7d0a4bee672ec (patch)
tree470ec833cb14ee932607ce3114dc3a0439401197 /python
parent6ad4c5498d7fd241912044f893aa8a21b7c4d24b (diff)
downloadspark-bb76eae1b50e4bf18360220110f7d0a4bee672ec.tar.gz
spark-bb76eae1b50e4bf18360220110f7d0a4bee672ec.tar.bz2
spark-bb76eae1b50e4bf18360220110f7d0a4bee672ec.zip
[python alternative] pyspark require Python2, failing if system default is Py3 from shell.py
Python alternative for https://github.com/apache/spark/pull/392; managed from shell.py Author: AbhishekKr <abhikumar163@gmail.com> Closes #399 from abhishekkr/pyspark_shell and squashes the following commits: 134bdc9 [AbhishekKr] pyspark require Python2, failing if system default is Py3 from shell.py
Diffstat (limited to 'python')
-rw-r--r--  python/pyspark/shell.py | 20
1 file changed, 14 insertions, 6 deletions
diff --git a/python/pyspark/shell.py b/python/pyspark/shell.py
index 61613dbed8..e8ba050655 100644
--- a/python/pyspark/shell.py
+++ b/python/pyspark/shell.py
@@ -20,6 +20,14 @@ An interactive shell.
This file is designed to be launched as a PYTHONSTARTUP script.
"""
+
+import sys
+if sys.version_info.major != 2:
+ print("Error: Default Python used is Python%s" % sys.version_info.major)
+ print("\tSet env variable PYSPARK_PYTHON to Python2 binary and re-run it.")
+ sys.exit(1)
+
+
import os
import platform
import pyspark
@@ -34,21 +42,21 @@ if os.environ.get("SPARK_EXECUTOR_URI"):
sc = SparkContext(os.environ.get("MASTER", "local[*]"), "PySparkShell", pyFiles=add_files)
-print """Welcome to
+print("""Welcome to
____ __
/ __/__ ___ _____/ /__
_\ \/ _ \/ _ `/ __/ '_/
/__ / .__/\_,_/_/ /_/\_\ version 1.0.0-SNAPSHOT
/_/
-"""
-print "Using Python version %s (%s, %s)" % (
+""")
+print("Using Python version %s (%s, %s)" % (
platform.python_version(),
platform.python_build()[0],
- platform.python_build()[1])
-print "Spark context available as sc."
+ platform.python_build()[1]))
+print("Spark context available as sc.")
if add_files != None:
- print "Adding files: [%s]" % ", ".join(add_files)
+ print("Adding files: [%s]" % ", ".join(add_files))
# The ./bin/pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
# which allows us to execute the user's PYTHONSTARTUP file: