Posted to commits@mxnet.apache.org by GitBox <gi...@apache.org> on 2018/05/13 17:20:06 UTC

[GitHub] zheng-da commented on a change in pull request #10921: Test cases improvement for MKLDNN on Gluon

URL: https://github.com/apache/incubator-mxnet/pull/10921#discussion_r187807475
 ##########
 File path: tests/python/mkl/test_mkldnn.py
 ##########
 @@ -95,122 +110,1147 @@ def __getitem__(self, key):
         assert_almost_equal(y[0, 0, 0, 0], 0.016711406)
         break
 
+def test_mkldnn_sum_inplace_with_cpu_layout():
+    x_shape = (32, 3, 224, 224)
+    x_npy = np.ones(x_shape)
+    y_shape = (32, 32, 222, 222)
+    y_npy = np.ones(y_shape)
+    x = mx.sym.Variable("x")
+    y = mx.sym.Variable("y")
+    z = mx.symbol.Convolution(data=x, num_filter=32, kernel=(3, 3))
+    z = mx.sym.add_n(z, y)
+    exe = z.simple_bind(ctx=mx.cpu(), x=x_shape, y=y_shape)
+    out = exe.forward(is_train=False, x=x_npy, y=y_npy)[0]
+    assert_almost_equal(out[0].asnumpy()[0, 0, 0], 1.0)
+
+@with_seed()
+def test_conv2d_mkldnn():
+    chn_list = [16, 32, 64, 128, 256, 512, 1024]
+    kernel_list = np.random.randint(low=1, high=224, size=9).tolist()
+    kernel_list.append(224)
+    batch_size = 32
+    class Net(gluon.HybridBlock):
+        def __init__(self,
+                     chn_num,
+                     kernel,
+                     **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel))
+
+        def hybrid_forward(self, F, x):
+            out = self.conv0(x)
+            return out
+
+    x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, 3, 224, 224))
+    for i in range(len(chn_list)):
+        for j in range(len(kernel_list)):
+            net = Net(chn_list[i], kernel_list[j])
+            check_layer_forward(net, x)
 
 @with_seed()
-def test_reshape_before_conv():
+def test_batchnorm_mkldnn():
+    chn_list = [16, 32, 64, 128, 256, 512, 1024]
+    shape = np.random.randint(low=1, high=300, size=10)
+    shape_list = []
+    for i in range(len(shape)):
+        shape_list.append((shape[i], shape[i]))
+    batch_size = 32
+    class Net(gluon.HybridBlock):
+        def __init__(self,
+                     chn_num,
+                     kernel,
+                     axis,
+                     **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel))
+                self.bn0   = gluon.nn.BatchNorm(axis=axis)
+
+        def hybrid_forward(self, F, x):
+            conv = self.conv0(x)
+            out = self.bn0(conv)
+            return out
+
+    for i in range(len(chn_list)):
+        for j in range(len(shape_list)):
+            shape = (batch_size, ) + (3,) + shape_list[j]
+            x = mx.nd.random.uniform(-1.0, 1.0, shape=shape)
+            net = Net(chn_list[i], 1, 1)
+            check_layer_forward(net, x)
+
+@with_seed()
+def test_concat_mkldnn():
+    chn_list = [16, 32, 64, 128, 256, 512, 1024]
+    input_num = np.random.randint(low=2, high=11)
+    shape = np.random.randint(low=1, high=300, size=10)
+    shape_list = []
+    for i in range(len(shape)):
+        shape_list.append((shape[i], shape[i]))
+    batch_size = 32
+    class Net(gluon.HybridBlock):
+        def __init__(self,
+                     check_dim,
+                     input_num,
+                     chn_num,
+                     kernel,
+                     **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                from mxnet.gluon.contrib.nn import HybridConcurrent
+                self.concat = HybridConcurrent(axis=check_dim)
+                for i in range(input_num):
+                    self.concat.add(gluon.nn.Conv2D(chn_num, (kernel, kernel)))
+
+        def hybrid_forward(self, F, x):
+            return self.concat(x)
+
+    for i in range(len(chn_list)):
+        for j in range(len(shape_list)):
+            shape = (batch_size,) + (3,) + shape_list[j]
+            x = mx.nd.random.uniform(-1.0, 1.0, shape=shape)
+            for axis in range(4):
+                net = Net(axis, input_num, chn_list[i], 1)
+                check_layer_forward(net, x)
+
+@with_seed()
+def test_reshape_conv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(64, (3, 3))
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape((0, 0, 448, 112))
+            out = self.conv0(x_reshape)
+            return out
+    x = mx.nd.random.uniform(shape=(32, 3, 224, 224))
+    net = Net()
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_reshape_conv_reshape_conv():
     class Net(gluon.HybridBlock):
-        """
-        test Net
-        """
         def __init__(self, **kwargs):
             super(Net, self).__init__(**kwargs)
             with self.name_scope():
-                self.conv0 = nn.Conv2D(10, (3, 3))
-                self.conv1 = nn.Conv2D(5, (3, 3))
+                self.conv0 = nn.Conv2D(64, (3, 3))
+                self.conv1 = nn.Conv2D(256, (3, 3))
 
-        def hybrid_forward(self, F, x, *args, **kwargs):
-            x_reshape = x.reshape((0, 0, 20, 5))
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape((0, 0, 448, 112))
             y = self.conv0(x_reshape)
-            y_reshape = y.reshape((0, 0, 9, 6))
+            y_reshape = y.reshape((0, 0, 223, 220))
             out = self.conv1(y_reshape)
             return out
-    x = mx.nd.random.uniform(shape=(2, 4, 10, 10))
-    x.attach_grad()
+    x = mx.nd.random.uniform(shape=(32, 3, 224, 224))
     net = Net()
-    net.collect_params().initialize()
-    with mx.autograd.record():
-        out1 = net(x)
-    out1.backward()
-    dx1 = x.grad
-    net.hybridize()
-    with mx.autograd.record():
-        out2 = net(x)
-    out2.backward()
-    mx.test_utils.assert_almost_equal(dx1.asnumpy(), x.grad.asnumpy(), rtol=1e-5, atol=1e-6)
-    mx.test_utils.assert_almost_equal(out1.asnumpy(), out2.asnumpy(), rtol=1e-5, atol=1e-6)
+    check_layer_forward(net, x)
 
 
 @with_seed()
-def test_slice_before_conv():
+def test_slice_conv():
     class Net(gluon.HybridBlock):
-        """
-        test Net
-        """
         def __init__(self, **kwargs):
             super(Net, self).__init__(**kwargs)
             with self.name_scope():
-                self.conv0 = nn.Conv2D(4, (3, 3))
-                self.conv1 = nn.Conv2D(4, (3, 3))
+                self.conv0 = nn.Conv2D(64, (3, 3))
 
-        def hybrid_forward(self, F, x, *args, **kwargs):
-            x_slice = x.slice(begin=(0, 0, 0, 0), end=(2, 4, 10, 10))
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=(0, 2, 0, 0), end=(32, 5, 224, 224))
+            out = self.conv0(x_slice)
+            return out
+    x = mx.nd.random.uniform(shape=(32, 6, 224, 224))
+    net = Net()
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_slice_conv_slice_conv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(64, (3, 3))
+                self.conv1 = nn.Conv2D(256, (3, 3))
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=(0, 2, 0, 0), end=(32, 5, 224, 224))
             y = self.conv0(x_slice)
-            y_slice = y.slice(begin=(1, 0, 2, 2), end=(2, 1, 7, 7))
+            y_slice = y.slice(begin=(0, 32, 0, 0), end=(32, 64, 222, 222))
             out = self.conv1(y_slice)
             return out
-    x = mx.nd.random.uniform(shape=(2, 10, 10, 10))
-    x.attach_grad()
+    x = mx.nd.random.uniform(shape=(32, 6, 224, 224))
     net = Net()
-    net.collect_params().initialize()
-    with mx.autograd.record():
-        out1 = net(x)
-    out1.backward()
-    dx1 = x.grad
-    net.hybridize()
-    with mx.autograd.record():
-        out2 = net(x)
-    out2.backward()
-    mx.test_utils.assert_almost_equal(dx1.asnumpy(), x.grad.asnumpy(), rtol=1e-5, atol=1e-6)
-    mx.test_utils.assert_almost_equal(out1.asnumpy(), out2.asnumpy(), rtol=1e-5, atol=1e-6)
+    check_layer_forward(net, x)
 
 
 @with_seed()
-def test_slice_reshape_before_conv():
+def test_slice_conv_reshape_conv():
     class Net(gluon.HybridBlock):
-        """
-        test Net
-        """
         def __init__(self, **kwargs):
             super(Net, self).__init__(**kwargs)
             with self.name_scope():
-                self.conv0 = nn.Conv2D(4, (3, 3))
-                self.conv1 = nn.Conv2D(4, (3, 3))
+                self.conv0 = nn.Conv2D(64, (3, 3))
+                self.conv1 = nn.Conv2D(256, (3, 3))
 
-        def hybrid_forward(self, F, x, *args, **kwargs):
-            x_slice = x.slice(begin=(0, 0, 0, 0), end=(2, 4, 8, 9))
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=(0, 0, 1, 1), end=(32, 3, 225, 225))
             y = self.conv0(x_slice)
-            y_reshape = y.reshape((0, 0, 14, 3))
+            y_reshape = y.reshape((0, 0, 444, 111))
             out = self.conv1(y_reshape)
             return out
-    x = mx.nd.random.uniform(shape=(2, 10, 10, 10))
-    x.attach_grad()
+
+    x = mx.nd.random.uniform(shape=(32, 3, 299, 299))
     net = Net()
-    net.collect_params().initialize()
-    with mx.autograd.record():
-        out1 = net(x)
-    out1.backward()
-    dx1 = x.grad
-    net.hybridize()
-    with mx.autograd.record():
-        out2 = net(x)
-    out2.backward()
-    mx.test_utils.assert_almost_equal(dx1.asnumpy(), x.grad.asnumpy(), rtol=1e-5, atol=1e-6)
-    mx.test_utils.assert_almost_equal(out1.asnumpy(), out2.asnumpy(), rtol=1e-5, atol=1e-6)
+    check_layer_forward(net, x)
 
 
-def test_mkldnn_sum_inplace_with_cpu_layout():
+@with_seed()
+def test_reshape_conv_slice_conv():
+    """Test Gluon Conv2D computation on MKL-DNN with ndarray reshape followed by slice."""
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(64, (3, 3))
+                self.conv1 = nn.Conv2D(256, (3, 3))
 
-    x_shape = (32, 3, 224, 224)
-    x_npy = np.ones(x_shape)
-    y_shape = (32, 32, 222, 222)
-    y_npy = np.ones(y_shape)
-    x = mx.sym.Variable("x")
-    y = mx.sym.Variable("y")
-    z = mx.symbol.Convolution(data=x, num_filter=32, kernel=(3, 3))
-    z = mx.sym.add_n(z, y)
-    exe = z.simple_bind(ctx=mx.cpu(), x=x_shape, y=y_shape)
-    out = exe.forward(is_train=False, x=x_npy, y=y_npy)[0]
-    assert_almost_equal(out[0].asnumpy()[0, 0, 0], 1.0)
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape((0, 0, 448, 112))
+            y = self.conv0(x_reshape)
+            y_slice = y.slice(begin=(0, 32, 0, 0), end=(32, 64, 446, 110))
+            out = self.conv1(y_slice)
+            return out
+    x = mx.nd.random.uniform(shape=(32, 6, 224, 224))
+    net = Net()
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_reshape_dense():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                channel0 = random.randint(1, 1000)
+                self.dense0 = nn.Dense(channel0)
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape((8, 64, 600, -1))
+            out = self.dense0(x_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 300, 300))
+    net = Net()
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_slice_dense():
+    class Net(gluon.HybridBlock):
+        def __init__(self, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                channel0 = random.randint(1, 1000)
+                self.dense0 = nn.Dense(channel0)
+                self.slice = slice
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=tuple(self.slice[0]),
+                              end=tuple(self.slice[1]))
+            out = self.dense0(x_slice)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 300, 300))
+    slice = [[0, 64, 50, 0], [8, 128, 300, 300]]
+    net = Net(slice)
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_slice_dense_slice_dense():
+    class Net(gluon.HybridBlock):
+        def __init__(self, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                channel0 = 50
+                channel1 = random.randint(1, 1000)
+                self.dense0 = nn.Dense(channel0)
+                self.dense1 = nn.Dense(channel1)
+                self.slice = slice
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1]))
+            y = self.dense0(x_slice)
+            y_slice = y.slice(begin=(4, 0), end=(-1, 10))
+            out = self.dense1(y_slice)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 300, 300))
+    slice = [[0, 64, 50, 0], [8, 128, 300, 300]]
+    net = Net(slice)
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_reshape_dense_reshape_dense():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                channel0 = random.randint(1, 1000)
+                channel1 = random.randint(1, 1000)
+                self.dense0 = nn.Dense(channel0)
+                self.dense1 = nn.Dense(channel1)
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape((8, 64, 600, -1))
+            y = self.dense0(x_reshape)
+            y_reshape = y.reshape((1, -1))
+            out = self.dense1(y_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 300, 300))
+    net = Net()
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_slice_dense_reshape_dense():
+    class Net(gluon.HybridBlock):
+        def __init__(self, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                channel0 = random.randint(1, 1000)
+                channel1 = random.randint(1, 1000)
+                self.dense0 = nn.Dense(channel0)
+                self.dense1 = nn.Dense(channel1)
+                self.slice = slice
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1]))
+            y = self.dense0(x_slice)
+            y_reshape = y.reshape((1, -1))
+            out = self.dense1(y_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 300, 300))
+    slice = [[0, 64, 50, 0], [8, 128, 300, 300]]
+    net = Net(slice)
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_reshape_dense_slice_dense():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                channel0 = 800
+                channel1 = random.randint(1, 1000)
+                self.dense0 = nn.Dense(channel0)
+                self.dense1 = nn.Dense(channel1)
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape((8, 64, 600, -1))
+            y = self.dense0(x_reshape)
+            y_slice = y.slice(begin=(0, 500), end=(8, 628))
+            out = self.dense1(y_slice)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 300, 300))
+    net = Net()
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_reshape_batchnorm():
+    class Net(gluon.HybridBlock):
+        def __init__(self, shape, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(128, (1, 1))
+                self.bn0 = nn.BatchNorm()
+                self.reshape = shape
+
+        def hybrid_forward(self, F, x):
+            x_in = self.conv0(x)
+            x_reshape = x_in.reshape(self.reshape)
+            out = self.bn0(x_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    shape = (32, 512, 128, -1)
+    net = Net(shape)
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_slice_batchnorm():
+    class Net(gluon.HybridBlock):
+        def __init__(self, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(128, (1, 1))
+                self.bn0 = nn.BatchNorm(3)
+                self.slice = slice
+
+        def hybrid_forward(self, F, x):
+            x_in = self.conv0(x)
+            x_slice = x_in.slice(begin=tuple(self.slice[0]),
+                                 end=tuple(self.slice[1]))
+            out = self.bn0(x_slice)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    slice = [[0, 64, 50, 0], [8, 128, 256, 256]]
+    net = Net(slice)
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_slice_batchnorm_slice_batchnorm():
+    class Net(gluon.HybridBlock):
+        def __init__(self, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(128, (1, 1))
+                self.bn0 = nn.BatchNorm(3)
+                self.bn1 = nn.BatchNorm(1)
+                self.slice = slice
+
+        def hybrid_forward(self, F, x):
+            x_in = self.conv0(x)
+            x_slice = x_in.slice(begin=tuple(self.slice[0][0]), end=tuple(self.slice[0][1]))
+            y = self.bn0(x_slice)
+            y_slice = y.slice(begin=tuple(self.slice[1][0]), end=tuple(self.slice[1][1]))
+            out = self.bn1(y_slice)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    slice = [[[0, 64, 50, 0], [8, 128, 200, 256]], [[4, 50, 0, 128], [7, -1, -1, -1]]]
+    net = Net(slice)
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_reshape_batchnorm_reshape_batchnorm():
+    class Net(gluon.HybridBlock):
+        def __init__(self, shape, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(128, (1, 1))
+                self.bn0 = nn.BatchNorm(0)
+                self.bn1 = nn.BatchNorm(2)
+                self.reshape = shape
+
+        def hybrid_forward(self, F, x):
+            x_in = self.conv0(x)
+            x_reshape = x_in.reshape(self.reshape[0])
+            y = self.bn0(x_reshape)
+            y_reshape = y.reshape(self.reshape[1])
+            out = self.bn1(y_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 512))
+    shape = [(8, 256, 128, -1), (32, 128, 512, -1)]
+    net = Net(shape)
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_slice_batchnorm_reshape_batchnorm():
+    class Net(gluon.HybridBlock):
+        def __init__(self, shape, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(128, (1, 1))
+                self.bn0 = nn.BatchNorm(0)
+                self.bn1 = nn.BatchNorm(2)
+                self.reshape = shape
+                self.slice = slice
+
+        def hybrid_forward(self, F, x):
+            x_in = self.conv0(x)
+            x_slice = x_in.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1]))
+            y = self.bn0(x_slice)
+            y_reshape = y.reshape(self.reshape)
+            out = self.bn1(y_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    slice = [[0, 64, 50, 0], [8, 128, 200, 256]]
+    shape = (1, 128, 256, -1)
+    net = Net(shape, slice)
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_reshape_batchnorm_slice_batchnorm():
+    class Net(gluon.HybridBlock):
+        def __init__(self, shape, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(128, (1, 1))
+                self.bn0 = nn.BatchNorm(2)
+                self.bn1 = nn.BatchNorm(0)
+                self.reshape = shape
+                self.slice = slice
+
+        def hybrid_forward(self, F, x):
+            x_in = self.conv0(x)
+            x_reshape = x_in.reshape(self.reshape)
+            y = self.bn0(x_reshape)
+            y_slice = y.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1]))
+            out = self.bn1(y_slice)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    slice = [[0, 0, 50, 0], [8, 1, -1, 100]]
+    shape = (128, 1, 256, -1)
+    net = Net(shape, slice)
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_reshape_maxpooling():
+    class Net(gluon.HybridBlock):
+        def __init__(self, shape, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.reshape = shape
+                self.pool0 = nn.MaxPool2D(strides=(2, 3), padding=(1, 1))
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape(self.reshape)
+            out = self.pool0(x_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    shape = (128, 256, 256, -1)
+    net = Net(shape)
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_slice_maxpooling():
+    class Net(gluon.HybridBlock):
+        def __init__(self, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.slice = slice
+                self.pool0 = nn.MaxPool2D(strides=(2, 2), padding=(1, 0))
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=self.slice[0], end=self.slice[1])
+            out = self.pool0(x_slice)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    slice = [(12, 0, 128, 64), (16, 16, 256, 256)]
+    net = Net(slice)
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_reshape_maxpooling_reshape_maxpooling():
+    class Net(gluon.HybridBlock):
+        def __init__(self, shape, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.reshape = shape
+                self.pool0 = nn.MaxPool2D(strides=(2, 3), padding=(1, 1))
+                self.pool1 = nn.MaxPool2D(strides=(2, 2), padding=(1, 0))
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape(self.reshape[0])
+            y = self.pool0(x_reshape)
+            y_reshape = y.reshape(self.reshape[1])
+            out = self.pool1(y_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    shape = [(128, 256, 64, -1), (128, 256, 11, -1)]
+    net = Net(shape)
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_slice_maxpooling_slice_maxpooling():
+    class Net(gluon.HybridBlock):
+        def __init__(self, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.slice = slice
+                self.pool0 = nn.MaxPool2D(strides=(2, 3), padding=(1, 1))
+                self.pool1 = nn.MaxPool2D(strides=(2, 2), padding=(1, 0))
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1])
+            y = self.pool0(x_slice)
+            y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1])
+            out = self.pool1(y_slice)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    slice = [[(8, 0, 100, 50), (16, -1, -1, -1)], [(0, 64, 0, 50), (2, -1, -1, -1)]]
+    net = Net(slice)
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_slice_maxpooling_reshape_maxpooling():
+    class Net(gluon.HybridBlock):
+        def __init__(self, shape, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.slice = slice
+                self.reshape = shape
+                self.pool0 = nn.MaxPool2D(strides=(2, 3), padding=(1, 1))
+                self.pool1 = nn.MaxPool2D(strides=(2, 2), padding=(1, 0))
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=self.slice[0], end=self.slice[1])
+            y = self.pool0(x_slice)
+            y_reshape = y.reshape(self.reshape)
+            out = self.pool1(y_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    slice = [(8, 0, 100, 50), (16, 128, 256, 256)]
+    shape = (32, -1, 0, 0)
+    net = Net(shape, slice)
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_reshape_maxpooling_slice_maxpooling():
+    class Net(gluon.HybridBlock):
+        def __init__(self, shape, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.reshape = shape
+                self.slice = slice
+                self.pool0 = nn.MaxPool2D(strides=(2, 3), padding=(1, 1))
+                self.pool1 = nn.MaxPool2D(strides=(2, 2), padding=(1, 0))
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape(self.reshape)
+            y = self.pool0(x_reshape)
+            y_slice = y.slice(begin=self.slice[0], end=self.slice[1])
+            out = self.pool1(y_slice)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    shape = (0, 512, 64, -1)
+    slice = [(8, 256, 10, 20), (-1, -1, -1, 70)]
+    net = Net(shape, slice)
+    check_layer_forward(net, x)
+
+
+@with_seed()
+def test_reshape_avgpooling():
 
 Review comment:
  Shouldn't average pooling use the same operator as max pooling?
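
  A hedged sketch of the unification this comment suggests, in the spirit
  of the tests above: one parameterized helper takes the pooling class
  (nn.MaxPool2D or nn.AvgPool2D), so the avg-pooling variants need no
  duplicated bodies. The name check_reshape_pooling is illustrative, and
  the check_layer_forward body below is only a reconstruction of the
  pattern shown by the removed tests, not necessarily the PR's actual
  helper:

      import mxnet as mx
      from mxnet import gluon
      from mxnet.gluon import nn

      def check_layer_forward(net, x):
          # Reconstructed from the removed tests above: run the block
          # imperatively, then hybridized, and require matching outputs
          # and input gradients.
          x_hybrid = x.copy()
          x.attach_grad()
          x_hybrid.attach_grad()
          net.collect_params().initialize()
          with mx.autograd.record():
              out1 = net(x)
          out1.backward()
          net.hybridize()
          with mx.autograd.record():
              out2 = net(x_hybrid)
          out2.backward()
          mx.test_utils.assert_almost_equal(x.grad.asnumpy(),
                                            x_hybrid.grad.asnumpy(),
                                            rtol=1e-5, atol=1e-6)
          mx.test_utils.assert_almost_equal(out1.asnumpy(), out2.asnumpy(),
                                            rtol=1e-5, atol=1e-6)

      def check_reshape_pooling(pool_cls):
          # pool_cls is nn.MaxPool2D or nn.AvgPool2D; strides, padding and
          # the reshape are shared, so only the operator class varies.
          class Net(gluon.HybridBlock):
              def __init__(self, shape, **kwargs):
                  super(Net, self).__init__(**kwargs)
                  with self.name_scope():
                      self.reshape = shape
                      self.pool0 = pool_cls(strides=(2, 3), padding=(1, 1))

              def hybrid_forward(self, F, x):
                  return self.pool0(x.reshape(self.reshape))

          x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
          net = Net((128, 256, 256, -1))
          check_layer_forward(net, x)

      for pool_cls in [nn.MaxPool2D, nn.AvgPool2D]:
          check_reshape_pooling(pool_cls)

  Each reshape/slice pooling case above could then be written once and
  exercised for both operators.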

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services