Posted to commits@singa.apache.org by ka...@apache.org on 2018/07/13 07:47:07 UTC

[2/2] incubator-singa git commit: SINGA-382 Implement concat operation for autograd

SINGA-382 Implement concat operation for autograd

Fix the bug in calling the C++ ConcatOn function by converting the input args into the VecTensor type.
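
The user-facing signature changes from concat(*xs) to concat(xs, axis). A minimal usage sketch of the new call, with tensor names taken from the mnist_cnn.py change below:

    y1 = conv21(y)                    # (N, 16, H, W)
    y2 = conv22(y)                    # (N, 16, H, W)
    y = autograd.concat((y1, y2), 1)  # (N, 32, H, W), joined on axis 1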


Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/76779be7
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/76779be7
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/76779be7

Branch: refs/heads/master
Commit: 76779be72ef67de5aba6bdbc669f3252ab8e4104
Parents: 054f303
Author: wang wei <wa...@comp.nus.edu.sg>
Authored: Thu Jul 12 22:02:07 2018 +0800
Committer: Wang Wei <wa...@gmail.com>
Committed: Fri Jul 13 15:08:46 2018 +0800

----------------------------------------------------------------------
 examples/autograd/mnist_cnn.py |  9 ++++++---
 python/singa/autograd.py       | 12 +++++++-----
 2 files changed, 13 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/76779be7/examples/autograd/mnist_cnn.py
----------------------------------------------------------------------
diff --git a/examples/autograd/mnist_cnn.py b/examples/autograd/mnist_cnn.py
index 92fc43a..3ddd532 100755
--- a/examples/autograd/mnist_cnn.py
+++ b/examples/autograd/mnist_cnn.py
@@ -107,7 +107,8 @@ if __name__ == '__main__':
     # operations initialization
     conv1 = autograd.Conv2D(1, 32, 3, padding=1, bias=False)
     bn1 = autograd.BatchNorm(32)
-    conv2 = autograd.Conv2D(32, 32, 3, padding=1)
+    conv21 = autograd.Conv2D(32, 16, 3, padding=1)
+    conv22 = autograd.Conv2D(32, 16, 3, padding=1)
     bn2 = autograd.BatchNorm(32)
     linear = autograd.Linear(32 * 28 * 28, 10)
     pooling1 = autograd.MaxPooling2D(3, 1, padding=1)
@@ -118,8 +119,10 @@ if __name__ == '__main__':
         y = autograd.relu(y)
         y = bn1(y)
         y = pooling1(y)
-
-        y = conv2(y)
+        y1 = conv21(y)
+        y2 = conv22(y)
+        y = autograd.concat((y1, y2), 1)
+        y = bn2(y)
         y = autograd.relu(y)
         y = bn2(y)
         y = pooling2(y)
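
The two 16-channel branches concatenate back to 32 channels on axis 1, so bn2 and the layers below still see the shape the old single conv2 produced. A quick numpy sketch of the shape arithmetic (illustrative values, not SINGA code):

    import numpy as np
    y1 = np.zeros((4, 16, 28, 28))        # output of conv21
    y2 = np.zeros((4, 16, 28, 28))        # output of conv22
    y = np.concatenate((y1, y2), axis=1)  # join along the channel axis
    assert y.shape == (4, 32, 28, 28)     # bn2 still sees 32 channels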

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/76779be7/python/singa/autograd.py
----------------------------------------------------------------------
diff --git a/python/singa/autograd.py b/python/singa/autograd.py
index c3986f3..faa9685 100755
--- a/python/singa/autograd.py
+++ b/python/singa/autograd.py
@@ -557,7 +557,8 @@ class Concat(Operation):
             for t in xs:
                 offset += t.shape()[self.axis]
                 self.slice_point.append(offset)
-        return singa.ConcatOn(xs, self.axis)
+        x = singa.VecTensor(list(xs))
+        return singa.ConcatOn(x, self.axis)
 
     def backward(self, dy):
         assert hasattr(
@@ -571,9 +572,9 @@ class Concat(Operation):
         return tuple(dxs)
 
 
-def concat(*xs):
-    # TODO changable axis
-    return Concat()(*xs)
+def concat(xs, axis=0):
+    # xs is a tuple of multiple Tensors
+    return Concat(axis)(*xs)[0]
 
 
 class _Conv2D(Operation):
@@ -741,7 +742,8 @@ class BatchNorm(Layer):
             shape=param_shape, requires_grad=False, stores_grad=False)
 
     def __call__(self, x):
-        assert x.shape[1] == self.channels, 'number of channels dismatched.'
+        assert x.shape[1] == self.channels, 'number of channels dismatched. %d vs %d' % (
+            x.shape[1], self.channels)
 
         self.device_check(x, self.scale, self.bias,
                           self.running_mean, self.running_var)
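
For intuition, Concat.backward only has to split dy back at the offsets recorded in self.slice_point during forward. A numpy sketch of that split (hypothetical values; the real code returns one gradient per input tensor):

    import numpy as np
    dy = np.ones((4, 32, 28, 28))              # gradient flowing into Concat
    slice_point = [16, 32]                     # accumulated offsets along axis 1
    dx1, dx2 = np.split(dy, slice_point[:-1], axis=1)
    assert dx1.shape == dx2.shape == (4, 16, 28, 28)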