From ce9f1bbe20eff794cd1d588dc88f109d32588cfe Mon Sep 17 00:00:00 2001
From: Josh Rosen
Date: Tue, 1 Jan 2013 21:25:49 -0800
Subject: Add `pyspark` script to replace the other scripts.

Expand the PySpark programming guide.
---
 python/pyspark/shell.py | 36 ++++++++++--------------------------
 python/run-tests        |  9 +++++++++
 2 files changed, 19 insertions(+), 26 deletions(-)
 create mode 100755 python/run-tests

diff --git a/python/pyspark/shell.py b/python/pyspark/shell.py
index bd39b0283f..7e6ad3aa76 100644
--- a/python/pyspark/shell.py
+++ b/python/pyspark/shell.py
@@ -1,33 +1,17 @@
 """
 An interactive shell.
-"""
-import optparse # I prefer argparse, but it's not included with Python < 2.7
-import code
-import sys
 
+This file is designed to be launched as a PYTHONSTARTUP script.
+"""
+import os
 from pyspark.context import SparkContext
 
 
-def main(master='local', ipython=False):
-    sc = SparkContext(master, 'PySparkShell')
-    user_ns = {'sc' : sc}
-    banner = "Spark context avaiable as sc."
-    if ipython:
-        import IPython
-        IPython.embed(user_ns=user_ns, banner2=banner)
-    else:
-        print banner
-        code.interact(local=user_ns)
-
+sc = SparkContext(os.environ.get("MASTER", "local"), "PySparkShell")
+print "Spark context available as sc."
 
-if __name__ == '__main__':
-    usage = "usage: %prog [options] master"
-    parser = optparse.OptionParser(usage=usage)
-    parser.add_option("-i", "--ipython", help="Run IPython shell",
-                      action="store_true")
-    (options, args) = parser.parse_args()
-    if len(sys.argv) > 1:
-        master = args[0]
-    else:
-        master = 'local'
-    main(master, options.ipython)
+# The ./pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
+# which allows us to execute the user's PYTHONSTARTUP file:
+_pythonstartup = os.environ.get('OLD_PYTHONSTARTUP')
+if _pythonstartup and os.path.isfile(_pythonstartup):
+    execfile(_pythonstartup)
diff --git a/python/run-tests b/python/run-tests
new file mode 100755
index 0000000000..da9e24cb1f
--- /dev/null
+++ b/python/run-tests
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+
+# Figure out where the Scala framework is installed
+FWDIR="$(cd `dirname $0`; cd ../; pwd)"
+
+$FWDIR/pyspark pyspark/rdd.py
+$FWDIR/pyspark -m doctest pyspark/broadcast.py
+
+# TODO: in the long run, it would be nice to use a test runner like `nose`.
--
cgit v1.2.3
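
Editorial note: the `pyspark` launcher named in the subject line is not part of this
diff, but the comment in shell.py pins down its contract: it must save any existing
PYTHONSTARTUP in OLD_PYTHONSTARTUP before pointing PYTHONSTARTUP at shell.py, so the
user's own startup file still runs. A minimal sketch of such a launcher, assuming a
repository layout in the style of run-tests (every name below is illustrative, not
the actual script):

    #!/usr/bin/env bash
    # Hypothetical sketch -- the real ./pyspark script is not shown in this
    # commit. Resolve the repo root (the script is assumed to live there):
    FWDIR="$(cd `dirname $0`; pwd)"

    # Make the pyspark package importable (assumed; the real script may
    # configure more of the environment than this):
    export PYTHONPATH="$FWDIR/python:$PYTHONPATH"

    # Save the user's startup file so shell.py can exec it afterwards,
    # then install shell.py as the interactive startup script:
    export OLD_PYTHONSTARTUP="$PYTHONSTARTUP"
    export PYTHONSTARTUP="$FWDIR/python/pyspark/shell.py"

    # Python only honors PYTHONSTARTUP in interactive sessions, so the
    # same entry point also accepts script arguments, which is how
    # run-tests invokes it non-interactively:
    exec python "$@"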
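Since shell.py reads the master URL from the MASTER environment variable (falling
back to "local"), launching the shell might look something like this (a hedged
example; the output is abridged and the RDD call is assumed from this era's rdd.py):

    $ MASTER=local[2] ./pyspark
    Spark context available as sc.
    >>> sc.parallelize([1, 2, 3]).count()
    3

run-tests then reuses the same entry point non-interactively: rdd.py is executed
directly (presumably running its own doctests from a __main__ hook), while
broadcast.py, which lacks such a hook, is run through `-m doctest`.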