Posted to commits@spark.apache.org by ma...@apache.org on 2014/01/22 23:02:20 UTC

[37/50] git commit: add dimension parameters to example

add dimension parameters to example


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/1afdeaeb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/1afdeaeb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/1afdeaeb

Branch: refs/heads/master
Commit: 1afdeaeb2f436084a6fbe8d73690f148f7b462c4
Parents: 21c8a54
Author: Reza Zadeh <ri...@gmail.com>
Authored: Fri Jan 10 21:30:54 2014 -0800
Committer: Reza Zadeh <ri...@gmail.com>
Committed: Fri Jan 10 21:30:54 2014 -0800

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/examples/SparkSVD.scala   | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/1afdeaeb/examples/src/main/scala/org/apache/spark/examples/SparkSVD.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/SparkSVD.scala b/examples/src/main/scala/org/apache/spark/examples/SparkSVD.scala
index d9c672f..ce7c1c4 100644
--- a/examples/src/main/scala/org/apache/spark/examples/SparkSVD.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkSVD.scala
@@ -29,12 +29,12 @@ import org.apache.spark.mllib.linalg.SparseMatrix
  * Where i is the column, j the row, and value is the matrix entry
  * 
  * For example input file, see:
- * mllib/data/als/test.data
+ * mllib/data/als/test.data  (example is 4 x 4)
  */
 object SparkSVD {
   def main(args: Array[String]) {
-   if (args.length != 2) {
-      System.err.println("Usage: SparkSVD <master> <file>")
+   if (args.length != 4) {
+      System.err.println("Usage: SparkSVD <master> <file> m n")
       System.exit(1)
     }
     val sc = new SparkContext(args(0), "SVD",
@@ -45,8 +45,8 @@ object SparkSVD {
       val parts = line.split(',')
       MatrixEntry(parts(0).toInt, parts(1).toInt, parts(2).toDouble)
     }
-    val m = 4
-    val n = 4
+    val m = args(2).toInt
+    val n = args(3).toInt
 
     // recover largest singular vector
     val decomposed = SVD.sparseSVD(SparseMatrix(data, m, n), 1)
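----------------------------------------------------------------------

For reference, below is a sketch of roughly how the example's main method reads after this commit. Only the lines visible in the diff hunks above are taken from the commit; the imports, the SparkContext construction, the textFile read, and the handling of the result are assumptions filled in for illustration, not the exact contents of examples/src/main/scala/org/apache/spark/examples/SparkSVD.scala.

// Sketch, not the exact SparkSVD.scala file from the commit.
import org.apache.spark.SparkContext
// MatrixEntry and SVD are assumed to live alongside SparseMatrix.
import org.apache.spark.mllib.linalg.{MatrixEntry, SparseMatrix, SVD}

object SparkSVD {
  def main(args: Array[String]) {
    // This commit raises the expected argument count from 2 to 4:
    // the matrix dimensions m and n are now read from the command line
    // instead of being hard-coded to 4.
    if (args.length != 4) {
      System.err.println("Usage: SparkSVD <master> <file> m n")
      System.exit(1)
    }
    // Simplified; the real file passes additional constructor arguments.
    val sc = new SparkContext(args(0), "SVD")

    // Each input line is "i,j,value", where i is the column, j the row,
    // and value the matrix entry (see the scaladoc in the diff above).
    val data = sc.textFile(args(1)).map { line =>
      val parts = line.split(',')
      MatrixEntry(parts(0).toInt, parts(1).toInt, parts(2).toDouble)
    }
    val m = args(2).toInt  // rows, previously hard-coded to 4
    val n = args(3).toInt  // columns, previously hard-coded to 4

    // Recover the largest singular vector (k = 1), as in the original example.
    val decomposed = SVD.sparseSVD(SparseMatrix(data, m, n), 1)
    // The original example goes on to use `decomposed`; that part is outside the diff.

    sc.stop()
  }
}

With the new parameters, the bundled 4 x 4 test matrix referenced in the scaladoc would be run with something like (exact launch mechanism may differ): SparkSVD local mllib/data/als/test.data 4 4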