You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@systemml.apache.org by du...@apache.org on 2017/04/04 21:38:20 UTC
incubator-systemml git commit: [SYSTEMML-1460] Add `epochs` parameter
to `mnist_lenet::train(...)` function.
Repository: incubator-systemml
Updated Branches:
refs/heads/master f12759d75 -> abbce2bc7
[SYSTEMML-1460] Add `epochs` parameter to `mnist_lenet::train(...)` function.
This commit extracts the `epochs` variable out as a parameter of the
`mnist_lenet::train(...)` function.
Closes #450.
Project: http://git-wip-us.apache.org/repos/asf/incubator-systemml/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-systemml/commit/abbce2bc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-systemml/tree/abbce2bc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-systemml/diff/abbce2bc
Branch: refs/heads/master
Commit: abbce2bc7e2a281b27aff15e2cc523b41370ad00
Parents: f12759d
Author: Mike Dusenberry <mw...@us.ibm.com>
Authored: Tue Apr 4 14:34:46 2017 -0700
Committer: Mike Dusenberry <mw...@us.ibm.com>
Committed: Tue Apr 4 14:34:46 2017 -0700
----------------------------------------------------------------------
.../SystemML-NN/examples/mnist_lenet-train.dml | 32 +++++++++++---------
.../SystemML-NN/examples/mnist_lenet.dml | 4 +--
2 files changed, 20 insertions(+), 16 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/abbce2bc/scripts/staging/SystemML-NN/examples/mnist_lenet-train.dml
----------------------------------------------------------------------
diff --git a/scripts/staging/SystemML-NN/examples/mnist_lenet-train.dml b/scripts/staging/SystemML-NN/examples/mnist_lenet-train.dml
index 3bfc169..eafb34c 100644
--- a/scripts/staging/SystemML-NN/examples/mnist_lenet-train.dml
+++ b/scripts/staging/SystemML-NN/examples/mnist_lenet-train.dml
@@ -32,8 +32,10 @@
# - C: Number of color channels in the images.
# - Hin: Input image height.
# - Win: Input image width.
-# - out_dir: Directory to store weights and bias matrices of
-# trained model, as well as final test accuracy.
+# - epochs: [DEFAULT: 10] Total number of full training loops over
+# the full data set.
+# - out_dir: [DEFAULT: "."] Directory to store weights and bias
+# matrices of trained model, as well as final test accuracy.
# - fmt: [DEFAULT: "csv"] File format of `train` and `test` data.
# Options include: "csv", "mm", "text", and "binary".
#
@@ -60,18 +62,20 @@
# --conf spark.driver.maxResultSize=0 --conf spark.akka.frameSize=128
# $SYSTEMML_HOME/target/SystemML.jar -f mnist_lenet-train.dml
# -nvargs train=data/mnist/mnist_train.csv test=data/mnist/mnist_test.csv
-# C=1 Hin=28 Win=28 out_dir=model/mnist_lenet
+# C=1 Hin=28 Win=28 epochs=10 out_dir=model/mnist_lenet
# ```
#
source("mnist_lenet.dml") as mnist_lenet
-# Read training data
+# Read training data & settings
fmt = ifdef($fmt, "csv")
train = read($train, format=fmt)
test = read($test, format=fmt)
C = $C
Hin = $Hin
Win = $Win
+epochs = ifdef($epochs, 10)
+out_dir = ifdef($out_dir, ".")
# Extract images and labels
images = train[,2:ncol(train)]
@@ -94,17 +98,17 @@ y = labels[5001:nrow(images),]
y_val = labels[1:5000,]
# Train
-[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, y, X_val, y_val, C, Hin, Win)
+[W1, b1, W2, b2, W3, b3, W4, b4] = mnist_lenet::train(X, y, X_val, y_val, C, Hin, Win, epochs)
# Write model out
-write(W1, $out_dir+"/W1")
-write(b1, $out_dir+"/b1")
-write(W2, $out_dir+"/W2")
-write(b2, $out_dir+"/b2")
-write(W3, $out_dir+"/W3")
-write(b3, $out_dir+"/b3")
-write(W4, $out_dir+"/W4")
-write(b4, $out_dir+"/b4")
+write(W1, out_dir+"/W1")
+write(b1, out_dir+"/b1")
+write(W2, out_dir+"/W2")
+write(b2, out_dir+"/b2")
+write(W3, out_dir+"/W3")
+write(b3, out_dir+"/b3")
+write(W4, out_dir+"/W4")
+write(b4, out_dir+"/b4")
# Eval on test set
probs = mnist_lenet::predict(X_test, C, Hin, Win, W1, b1, W2, b2, W3, b3, W4, b4)
@@ -112,7 +116,7 @@ probs = mnist_lenet::predict(X_test, C, Hin, Win, W1, b1, W2, b2, W3, b3, W4, b4
# Output results
print("Test Accuracy: " + accuracy)
-write(accuracy, $out_dir+"/accuracy")
+write(accuracy, out_dir+"/accuracy")
print("")
print("")
http://git-wip-us.apache.org/repos/asf/incubator-systemml/blob/abbce2bc/scripts/staging/SystemML-NN/examples/mnist_lenet.dml
----------------------------------------------------------------------
diff --git a/scripts/staging/SystemML-NN/examples/mnist_lenet.dml b/scripts/staging/SystemML-NN/examples/mnist_lenet.dml
index a261b41..e5755c4 100644
--- a/scripts/staging/SystemML-NN/examples/mnist_lenet.dml
+++ b/scripts/staging/SystemML-NN/examples/mnist_lenet.dml
@@ -35,7 +35,7 @@ source("nn/optim/sgd_nesterov.dml") as sgd_nesterov
train = function(matrix[double] X, matrix[double] y,
matrix[double] X_val, matrix[double] y_val,
- int C, int Hin, int Win)
+ int C, int Hin, int Win, int epochs)
return (matrix[double] W1, matrix[double] b1,
matrix[double] W2, matrix[double] b2,
matrix[double] W3, matrix[double] b3,
@@ -55,6 +55,7 @@ train = function(matrix[double] X, matrix[double] y,
* - C: Number of input channels (dimensionality of input depth).
* - Hin: Input height.
* - Win: Input width.
+ * - epochs: Total number of full training loops over the full data set.
*
* Outputs:
* - W1: 1st layer weights (parameters) matrix, of shape (F1, C*Hf*Wf).
@@ -102,7 +103,6 @@ train = function(matrix[double] X, matrix[double] y,
# Optimize
print("Starting optimization")
batch_size = 64
- epochs = 10
iters = ceil(N / batch_size)
for (e in 1:epochs) {
for(i in 1:iters) {