You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@singa.apache.org by zh...@apache.org on 2018/07/16 03:13:30 UTC
[4/4] incubator-singa git commit: SINGA-382 Implement concat
operation for autograd
SINGA-382 Implement concat operation for autograd
Update the mnist_cnn example to use concat operation.
Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/870c5df0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/870c5df0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/870c5df0
Branch: refs/heads/master
Commit: 870c5df0b9fa6eb87044b49e1013ef2f5a5298e1
Parents: e651c1a
Author: Wang Wei <wa...@gmail.com>
Authored: Mon Jul 16 10:11:07 2018 +0800
Committer: Wang Wei <wa...@gmail.com>
Committed: Mon Jul 16 10:11:07 2018 +0800
----------------------------------------------------------------------
examples/autograd/mnist_cnn.py | 24 +++++++++++++-----------
tool/conda/singa/meta.yaml | 1 +
2 files changed, 14 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/870c5df0/examples/autograd/mnist_cnn.py
----------------------------------------------------------------------
diff --git a/examples/autograd/mnist_cnn.py b/examples/autograd/mnist_cnn.py
index 3ddd532..62ae5b2 100755
--- a/examples/autograd/mnist_cnn.py
+++ b/examples/autograd/mnist_cnn.py
@@ -24,7 +24,7 @@ import os
from singa import device
from singa import tensor
from singa import autograd
-from singa import optimizer
+from singa import opt
def load_data(path):
@@ -92,7 +92,7 @@ if __name__ == '__main__':
num_classes = 10
epochs = 1
- sgd = optimizer.SGD(0.001)
+ sgd = opt.SGD(lr=0.01)
x_train = preprocess(train[0])
y_train = to_categorical(train[1], num_classes)
@@ -105,14 +105,14 @@ if __name__ == '__main__':
print('the shape of testing label is', y_test.shape)
# operations initialization
- conv1 = autograd.Conv2D(1, 32, 3, padding=1, bias=False)
- bn1 = autograd.BatchNorm(32)
- conv21 = autograd.Conv2D(32, 16, 3, padding=1)
- conv22 = autograd.Conv2D(32, 16, 3, padding=1)
- bn2 = autograd.BatchNorm(32)
+ conv1 = autograd.Conv2d(1, 32, 3, padding=1, bias=False)
+ bn1 = autograd.BatchNorm2d(32)
+ conv21 = autograd.Conv2d(32, 16, 3, padding=1)
+ conv22 = autograd.Conv2d(32, 16, 3, padding=1)
+ bn2 = autograd.BatchNorm2d(32)
linear = autograd.Linear(32 * 28 * 28, 10)
- pooling1 = autograd.MaxPooling2D(3, 1, padding=1)
- pooling2 = autograd.AvgPooling2D(3, 1, padding=1)
+ pooling1 = autograd.MaxPool2d(3, 1, padding=1)
+ pooling2 = autograd.AvgPool2d(3, 1, padding=1)
def forward(x, t):
y = conv1(x)
@@ -121,7 +121,7 @@ if __name__ == '__main__':
y = pooling1(y)
y1 = conv21(y)
y2 = conv22(y)
- y = autograd.concat((y1, y2), 1)
+ y = autograd.cat((y1, y2), 1)
y = bn2(y)
y = autograd.relu(y)
y = bn2(y)
@@ -148,4 +148,6 @@ if __name__ == '__main__':
tensor.to_numpy(loss)[0])
for p, gp in autograd.backward(loss):
- sgd.apply(epoch, gp, p, '')
+ sgd.update(p, gp)
+
+ sgd.step()
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/870c5df0/tool/conda/singa/meta.yaml
----------------------------------------------------------------------
diff --git a/tool/conda/singa/meta.yaml b/tool/conda/singa/meta.yaml
index ee76636..424532c 100644
--- a/tool/conda/singa/meta.yaml
+++ b/tool/conda/singa/meta.yaml
@@ -55,6 +55,7 @@ requirements:
- flask-cors >=3.0.2
- pillow >=2.3.0
- future >=0.16.0
+ - tqdm
test:
source_files: