author     Andrew Or <andrewor14@gmail.com>        2014-05-16 22:36:23 -0700
committer  Patrick Wendell <pwendell@gmail.com>    2014-05-16 22:36:23 -0700
commit     cf6cbe9f76c3b322a968c836d039fc5b70d4ce43 (patch)
tree       7f1269166db1364d6f9393bd65d830a9948ce884 /examples/src/main/python/als.py
parent     4b8ec6fcfd7a7ef0857d5b21917183c181301c95 (diff)
[SPARK-1824] Remove <master> from Python examples
A recent PR (#552) fixed this for all Scala / Java examples. We need to do it for Python too. Note that this blocks on #799, which makes `bin/pyspark` go through Spark submit. With only the changes in this PR, the only way to run these examples is through Spark submit. Once #799 goes in, you can use `bin/pyspark` to run them too. For example:

```
bin/pyspark examples/src/main/python/pi.py 100 --master local-cluster[4,1,512]
```

Author: Andrew Or <andrewor14@gmail.com>

Closes #802 from andrewor14/python-examples and squashes the following commits:

cf50b9f [Andrew Or] De-indent python comments (minor)
50f80b1 [Andrew Or] Remove pyFiles from SparkContext construction
c362f69 [Andrew Or] Update docs to use spark-submit for python applications
7072c6a [Andrew Or] Merge branch 'master' of github.com:apache/spark into python-examples
427a5f0 [Andrew Or] Update docs
d32072c [Andrew Or] Remove <master> from examples + update usages
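For comparison, a hypothetical equivalent invocation through Spark submit; the master URL and the argument value are carried over from the `bin/pyspark` example above:

```
bin/spark-submit --master local-cluster[4,1,512] examples/src/main/python/pi.py 100
```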
Diffstat (limited to 'examples/src/main/python/als.py')
-rwxr-xr-x  examples/src/main/python/als.py  |  18
1 file changed, 9 insertions(+), 9 deletions(-)
diff --git a/examples/src/main/python/als.py b/examples/src/main/python/als.py
index 01552dc1d4..f0b46cd28b 100755
--- a/examples/src/main/python/als.py
+++ b/examples/src/main/python/als.py
@@ -46,15 +46,15 @@ def update(i, vec, mat, ratings):
     return np.linalg.solve(XtX, Xty)


 if __name__ == "__main__":
-    if len(sys.argv) < 2:
-        print >> sys.stderr, "Usage: als <master> <M> <U> <F> <iters> <slices>"
-        exit(-1)
-    sc = SparkContext(sys.argv[1], "PythonALS", pyFiles=[realpath(__file__)])
-    M = int(sys.argv[2]) if len(sys.argv) > 2 else 100
-    U = int(sys.argv[3]) if len(sys.argv) > 3 else 500
-    F = int(sys.argv[4]) if len(sys.argv) > 4 else 10
-    ITERATIONS = int(sys.argv[5]) if len(sys.argv) > 5 else 5
-    slices = int(sys.argv[6]) if len(sys.argv) > 6 else 2
+    """
+    Usage: als [M] [U] [F] [iterations] [slices]
+    """
+    sc = SparkContext(appName="PythonALS")
+    M = int(sys.argv[1]) if len(sys.argv) > 1 else 100
+    U = int(sys.argv[2]) if len(sys.argv) > 2 else 500
+    F = int(sys.argv[3]) if len(sys.argv) > 3 else 10
+    ITERATIONS = int(sys.argv[4]) if len(sys.argv) > 4 else 5
+    slices = int(sys.argv[5]) if len(sys.argv) > 5 else 2
     print "Running ALS with M=%d, U=%d, F=%d, iters=%d, slices=%d\n" % \
         (M, U, F, ITERATIONS, slices)
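Since every argument now has a default, a run may supply only a leading subset of them. A hypothetical invocation (the master URL and argument values are illustrative) that overrides M and U while keeping the defaults for F, iterations, and slices:

```
bin/spark-submit --master local[4] examples/src/main/python/als.py 200 1000
```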