From 1afdeaeb2f436084a6fbe8d73690f148f7b462c4 Mon Sep 17 00:00:00 2001
From: Reza Zadeh
Date: Fri, 10 Jan 2014 21:30:54 -0800
Subject: add dimension parameters to example

---
 .../src/main/scala/org/apache/spark/examples/SparkSVD.scala | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

(limited to 'examples/src')

diff --git a/examples/src/main/scala/org/apache/spark/examples/SparkSVD.scala b/examples/src/main/scala/org/apache/spark/examples/SparkSVD.scala
index d9c672f140..ce7c1c48b5 100644
--- a/examples/src/main/scala/org/apache/spark/examples/SparkSVD.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkSVD.scala
@@ -29,12 +29,12 @@ import org.apache.spark.mllib.linalg.SparseMatrix
  * Where i is the column, j the row, and value is the matrix entry
  *
  * For example input file, see:
- * mllib/data/als/test.data
+ * mllib/data/als/test.data (example is 4 x 4)
  */
 object SparkSVD {
   def main(args: Array[String]) {
-    if (args.length != 2) {
-      System.err.println("Usage: SparkSVD <master> <file>")
+    if (args.length != 4) {
+      System.err.println("Usage: SparkSVD <master> <file> m n")
       System.exit(1)
     }
     val sc = new SparkContext(args(0), "SVD",
@@ -45,8 +45,8 @@ object SparkSVD {
       val parts = line.split(',')
       MatrixEntry(parts(0).toInt, parts(1).toInt, parts(2).toDouble)
     }
-    val m = 4
-    val n = 4
+    val m = args(2).toInt
+    val n = args(3).toInt
 
     // recover largest singular vector
     val decomposed = SVD.sparseSVD(SparseMatrix(data, m, n), 1)
--
cgit v1.2.3
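
Usage note (not part of the patch): a minimal, hypothetical standalone driver sketching how the example is driven once the dimensions come from the command line, written against the same experimental mllib.linalg SVD API the diff uses. The object name SparkSVDUsage, the "local" master, and the input path are illustrative assumptions; only SparseMatrix, MatrixEntry, SVD.sparseSVD, and the input format come from the patched example. The package of MatrixEntry and SVD is assumed to match that of SparseMatrix shown in the hunk context.

// Hypothetical driver, roughly equivalent to running:
//   SparkSVD local mllib/data/als/test.data 4 4
// Assumes MatrixEntry and SVD live alongside SparseMatrix in
// org.apache.spark.mllib.linalg, as in the example this patch modifies.
import org.apache.spark.SparkContext
import org.apache.spark.mllib.linalg.{MatrixEntry, SparseMatrix, SVD}

object SparkSVDUsage {
  def main(args: Array[String]) {
    val sc = new SparkContext("local", "SVDUsage")

    // Input lines are "i,j,value" (1-indexed), as in mllib/data/als/test.data.
    val data = sc.textFile("mllib/data/als/test.data").map { line =>
      val parts = line.split(',')
      MatrixEntry(parts(0).toInt, parts(1).toInt, parts(2).toDouble)
    }

    // With this patch the dimensions are no longer hard-coded in the example;
    // they arrive as arguments. Here they are pinned to the 4 x 4 test matrix.
    val m = 4
    val n = 4

    // Recover the largest singular vector, exactly as the example does.
    val decomposed = SVD.sparseSVD(SparseMatrix(data, m, n), 1)

    sc.stop()
  }
}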