You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@systemml.apache.org by mb...@apache.org on 2016/08/24 16:06:43 UTC

incubator-systemml git commit: [SYSTEMML-657] Fix als-cg algorithm script (missing parentheses)

Repository: incubator-systemml
Updated Branches:
  refs/heads/master afe7ebec0 -> 827c4becd


[SYSTEMML-657] Fix als-cg algorithm script (missing parentheses)

The change deprecating ppred introduced script-level issues (missing
parentheses) that resulted in compilation errors. Note that this corrupted
script has never been released.

Project: http://git-wip-us.apache.org/repos/asf/incubator-systemml/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-systemml/commit/827c4bec
Tree: http://git-wip-us.apache.org/repos/asf/incubator-systemml/tree/827c4bec
Diff: http://git-wip-us.apache.org/repos/asf/incubator-systemml/diff/827c4bec

Branch: refs/heads/master
Commit: 827c4becd138ece712179748e0374da22614484d
Parents: afe7ebe
Author: Matthias Boehm <mb...@us.ibm.com>
Authored: Wed Aug 24 18:05:26 2016 +0200
Committer: Matthias Boehm <mb...@us.ibm.com>
Committed: Wed Aug 24 18:06:21 2016 +0200

----------------------------------------------------------------------
 scripts/algorithms/ALS-CG.dml | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/827c4bec/scripts/algorithms/ALS-CG.dml
----------------------------------------------------------------------
diff --git a/scripts/algorithms/ALS-CG.dml b/scripts/algorithms/ALS-CG.dml
index 79b1464..e5c85fc 100644
--- a/scripts/algorithms/ALS-CG.dml
+++ b/scripts/algorithms/ALS-CG.dml
@@ -73,7 +73,7 @@ n = ncol (X);
 U = rand (rows = m, cols = r, min = -0.5, max = 0.5); # mxr
 V = rand (rows = n, cols = r, min = -0.5, max = 0.5); # nxr
 
-W = X != 0;
+W = (X != 0);
   
 # check for regularization
 if( reg == "L2" ) {
@@ -101,7 +101,7 @@ is_U = TRUE;  # TRUE = Optimize U, FALSE = Optimize V
 maxinneriter = r ; # min (ncol (U), 15);
 
 if( check ) {
-  loss_init = 0.5 * sum (X != 0) * (U %*% t(V) - X) ^ 2);
+  loss_init = 0.5 * sum( (X != 0) * (U %*% t(V) - X) ^ 2);
   loss_init = loss_init + 0.5 * lambda * (sum (U ^ 2 * row_nonzeros) + sum (V ^ 2 * col_nonzeros));
   print ("-----   Initial train loss: " + loss_init + " -----");
 }
@@ -112,10 +112,10 @@ while( as.integer(it/2) < max_iter & ! converged )
 {
   it = it + 1;
   if( is_U ) {
-    G = (X != 0) * (U %*% t(V) - X)) %*% V + lambda * U * row_nonzeros;
+    G = ((X != 0) * (U %*% t(V) - X)) %*% V + lambda * U * row_nonzeros;
   } 
   else {
-    G = t(t(U) %*% (X != 0) * (U %*% t(V) - X))) + lambda * V * col_nonzeros;
+    G = t(t(U) %*% ((X != 0) * (U %*% t(V) - X))) + lambda * V * col_nonzeros;
   }
 
   R = -G;
@@ -149,7 +149,7 @@ while( as.integer(it/2) < max_iter & ! converged )
 	
   # check for convergence
   if( check & (it%%2 == 0) ) {
-    loss_cur = 0.5 * sum (X != 0) * (U %*% t(V) - X) ^ 2);
+    loss_cur = 0.5 * sum( (X != 0) * (U %*% t(V) - X) ^ 2);
     loss_cur = loss_cur + 0.5 * lambda * (sum (U ^ 2 * row_nonzeros) + sum (V ^ 2 * col_nonzeros));
 	
     loss_dec = (loss_init - loss_cur) / loss_init;