You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@singa.apache.org by wa...@apache.org on 2018/08/16 08:44:32 UTC
[4/4] incubator-singa git commit: SINGA-387 Modified the design of the
autograd backward engine and corrected some mistakes in it
SINGA-387 Modified the design of the autograd backward engine and corrected some mistakes in it
- Tested the modified engine by running the example networks in the /example/autograd folder and fixed some bugs. All files now run without error.
Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/770d6cdb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/770d6cdb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/770d6cdb
Branch: refs/heads/master
Commit: 770d6cdb65ae528cfc9cae6e357198648c088168
Parents: b55b046
Author: xuewanqi <xu...@outlook.com>
Authored: Tue Aug 14 02:55:52 2018 +0000
Committer: xuewanqi <xu...@outlook.com>
Committed: Tue Aug 14 02:55:52 2018 +0000
----------------------------------------------------------------------
examples/autograd/resnet.py | 8 ++++----
python/singa/opt.py | 4 ++--
2 files changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/770d6cdb/examples/autograd/resnet.py
----------------------------------------------------------------------
diff --git a/examples/autograd/resnet.py b/examples/autograd/resnet.py
old mode 100644
new mode 100755
index 72c33ed..fab8129
--- a/examples/autograd/resnet.py
+++ b/examples/autograd/resnet.py
@@ -227,8 +227,8 @@ def resnet152(pretrained=False, **kwargs):
if __name__ == '__main__':
model = resnet18()
print('Start intialization............')
- dev = device.create_cuda_gpu_on(1)
-
+ dev = device.create_cuda_gpu_on(0)
+ #dev = device.create_cuda_gpu()
niters = 200
batch_size = 16
IMG_SIZE = 224
@@ -248,5 +248,5 @@ if __name__ == '__main__':
loss = autograd.softmax_cross_entropy(x, ty)
for p, g in autograd.backward(loss):
# print(p.shape, g.shape)
- # sgd.update(p, g)
- pass
+ sgd.update(p, g)
+ #pass
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/770d6cdb/python/singa/opt.py
----------------------------------------------------------------------
diff --git a/python/singa/opt.py b/python/singa/opt.py
old mode 100644
new mode 100755
index 6c59f28..f744f57
--- a/python/singa/opt.py
+++ b/python/singa/opt.py
@@ -30,7 +30,7 @@ class Optimizer(object):
def __init__(self, config):
self.default_config = config
- self.step = 0
+ self.iter = 0
self.param2config = {}
self.param2state = {}
@@ -46,7 +46,7 @@ class Optimizer(object):
def step(self):
r"""To increment the step counter"""
- self.step += 1
+ self.iter += 1
def register(self, param_group, config):
for param in param_group: