Posted to commits@tvm.apache.org by GitBox <gi...@apache.org> on 2020/07/15 19:20:00 UTC

[GitHub] [incubator-tvm] sxjscience commented on a change in pull request #6054: [RELAY][MXNET][FRONTEND] add support for MXNET numpy operators

sxjscience commented on a change in pull request #6054:
URL: https://github.com/apache/incubator-tvm/pull/6054#discussion_r455285577



##########
File path: tests/python/frontend/mxnet/test_forward.py
##########
@@ -1372,6 +1373,246 @@ def verify(data_shape, anchor_shape, stds=[1, 1, 1, 1], clip=-1, in_format="corn
     verify((1, 10, 4), (1, 10, 4), clip=1)
     verify((1, 10, 4), (1, 10, 4), in_format="center")
 
+
+def test_forward_npi_pad():
+    if not hasattr(mx.sym.np, 'pad'):
+        pytest.skip("mx.sym.np.pad hasn't been published yet")
+
+    def verify(data_shape, out_shape, mode, pad_width, constant_value=0.0):
+        data_np = np.random.uniform(size=data_shape).astype("float32")
+        data = mx.sym.var('data')
+        if mode == 'constant':
+            ref_res = mx.ndarray.pad(mx.nd.array(data_np), mode=mode, pad_width=pad_width, constant_value=constant_value)
+            mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode, pad_width=pad_width, constant_values=constant_value)
+        else:
+            ref_res = mx.ndarray.pad(mx.nd.array(data_np), mode=mode, pad_width=pad_width)
+            mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode, pad_width=pad_width)
+        # out_shape was previously unused; check the reference result against it.
+        assert ref_res.shape == out_shape
+        mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape})
+        for target, ctx in ctx_list():
+            for kind in ["debug"]:
+                intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
+                op_res = intrp.evaluate()(data_np)
+                tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5)
+
+    verify(data_shape=(1, 1, 3, 5), out_shape=(1, 1, 6, 12), mode="constant",
+           pad_width=(0, 0, 0, 0, 1, 2, 3, 4))
+    verify(data_shape=(1, 1, 3, 5), out_shape=(1, 1, 6, 12), mode="constant",
+           pad_width=(0, 0, 0, 0, 1, 2, 3, 4), constant_value=3.0)
+    verify(data_shape=(1, 1, 3, 5), out_shape=(1, 1, 6, 12), mode="edge",
+           pad_width=(0, 0, 0, 0, 1, 2, 3, 4))
+    verify(data_shape=(1, 1, 3, 5), out_shape=(1, 1, 6, 12), mode="reflect",
+           pad_width=(0, 0, 0, 0, 1, 2, 3, 4))
+    verify(data_shape=(1, 1, 3, 5, 7), out_shape=(1, 1, 6, 12, 18), mode="constant",
+           pad_width=(0, 0, 0, 0, 1, 2, 3, 4, 5, 6))
+    verify(data_shape=(1, 1, 3, 5, 7), out_shape=(1, 1, 6, 12, 18), mode="constant",
+           pad_width=(0, 0, 0, 0, 1, 2, 3, 4, 5, 6), constant_value=3.0)
+    verify(data_shape=(1, 1, 3, 5, 7), out_shape=(1, 1, 6, 12, 18), mode="edge",
+           pad_width=(0, 0, 0, 0, 1, 2, 3, 4, 5, 6))
+    verify(data_shape=(1, 1, 3, 5, 7), out_shape=(1, 1, 6, 12, 18), mode="reflect",
+           pad_width=(0, 0, 0, 0, 1, 2, 3, 4, 5, 6))
+
+
+def test_forward_npi_transpose():
+    def verify(data_shape, axes=None):
+        data_np = np.random.uniform(size=data_shape).astype("float32")
+        data = mx.sym.var('data')
+        ref_res = mx.np.transpose(mx.np.array(data_np), axes=axes)
+        mx_sym = mx.sym.np.transpose(data.as_np_ndarray(), axes=axes)
+        mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape})
+        for target, ctx in ctx_list():
+            for kind in ["graph", "vm", "debug"]:
+                intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
+                op_res = intrp.evaluate()(data_np)
+                tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5)
+
+    verify(data_shape=(2, 2, 2), axes=(1, 0, 2))
+    verify(data_shape=(2, 7, 2), axes=None)
+
+
+def test_forward_npi_concatenate():
+    def verify(data_shape1, data_shape2, axis=None):
+        data_np1 = np.random.uniform(size=data_shape1).astype("float32")
+        data_np2 = np.random.uniform(size=data_shape2).astype("float32")
+        data1 = mx.sym.var('data1')
+        data2 = mx.sym.var('data2')
+        ref_res = mx.np.concatenate([mx.np.array(data_np1), mx.np.array(data_np2)], axis=axis)
+        mx_sym = mx.sym.np.concatenate([data1.as_np_ndarray(), data2.as_np_ndarray()], axis=axis)
+        mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data1": data_shape1, "data2": data_shape2})
+        for target, ctx in ctx_list():
+            for kind in ["graph", "vm", "debug"]:
+                intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
+                op_res = intrp.evaluate()(data_np1, data_np2)
+                tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5)
+
+    verify(data_shape1=(2, 2), data_shape2=(2, 2), axis=1)
+    verify(data_shape1=(2, 4), data_shape2=(2, 3), axis=1)
+    verify(data_shape1=(1, 3, 2), data_shape2=(1, 3, 5), axis=2)
+    verify(data_shape1=(1, 3, 3), data_shape2=(1, 3, 3), axis=1)
+    verify(data_shape1=(1, 3), data_shape2=(1, 3), axis=0)
+    verify(data_shape1=(1, 3, 4), data_shape2=(1, 3, 4))
+
+
+def test_forward_np_copy():
+    def verify(data_shape):
+        data_np = np.random.uniform(size=data_shape).astype("float32")
+        data = mx.sym.var('data')
+        ref_res = mx.np.copy(mx.np.array(data_np))
+        mx_sym = mx.sym.np.copy(data.as_np_ndarray())
+        mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape})
+        for target, ctx in ctx_list():
+            for kind in ["graph", "vm", "debug"]:
+                intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
+                op_res = intrp.evaluate()(data_np)
+                tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5)
+
+    verify(data_shape=(2, 2, 2))
+    verify(data_shape=(2, 2, 2, 1, 2, 3, 1))
+    verify(data_shape=(1, 8))
+
+
+def test_forward_npx_reshape():
+    def verify(data_shape, out_shape, reverse=False):
+        data_np = np.random.uniform(size=data_shape).astype("float32")
+        data = mx.sym.var('data')
+        ref_res = mx.npx.reshape(mx.np.array(data_np), newshape=out_shape, reverse=reverse)
+        mx_sym = mx.sym.npx.reshape(data.as_np_ndarray(), newshape=out_shape, reverse=reverse)
+        mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape})
+        for target, ctx in ctx_list():
+            for kind in ["graph", "vm", "debug"]:
+                intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
+                op_res = intrp.evaluate()(data_np)
+                tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5)
+
+    # newshape special values in mx.npx.reshape: -1 infers a dimension,
+    # -2 copies an input dimension, -4 copies all remaining input dimensions,
+    # -5 merges two consecutive input dimensions, and -6 splits one input
+    # dimension in two; with reverse=True, matching starts from the right.
+    verify(data_shape=(2, 3, 8), out_shape=(-2, -2, 2, -1))
+    verify(data_shape=(8, 3, 3, 3, 4, 4), out_shape=(-6, 2, -1, -4))
+    verify(data_shape=(8, 3, 3, 3, 4, 4), out_shape=(-5, -4))
+    verify(data_shape=(8, 3, 3, 3, 3, 8), out_shape=(-4, -5), reverse=True)
+    verify(data_shape=(8, 3, 2, 4, 8), out_shape=(-4, -1, 2, -6), reverse=True)
+
+
+def test_forward_npi_binary():
+    def verify(data_shape):
+        ref_ops = [mx.np.power, mx.np.multiply, mx.np.add, mx.np.less]
+        mx_ops = [mx.sym.np.power, mx.sym.np.multiply, mx.sym.np.add, mx.sym.np.less]
+        for ref_op, mx_op in zip(ref_ops, mx_ops):
+            data_np1 = np.random.uniform(size=data_shape).astype("float32")
+            data_np2 = np.random.uniform(size=data_shape).astype("float32")
+            data1 = mx.sym.var('lhs')
+            data2 = mx.sym.var('rhs')
+            ref_res = ref_op(mx.np.array(data_np1), mx.np.array(data_np2))
+            mx_sym = mx_op(data1.as_np_ndarray(), data2.as_np_ndarray())
+            mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape, "rhs": data_shape})
+            for target, ctx in ctx_list():
+                for kind in ["graph", "vm", "debug"]:
+                    intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
+                    op_res = intrp.evaluate()(data_np1, data_np2)
+                    tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5)
+
+    verify(data_shape=(2, 2))
+    verify(data_shape=(2, 4))
+    verify(data_shape=(1, 3, 2))
+    verify(data_shape=(1, 3, 3))
+    verify(data_shape=(1, 3))
+    verify(data_shape=(1, 3, 4))
+
+
+def test_forward_npi_binary_scalar():
+    def verify(data_shape, scalar):
+        ref_ops = [mx.np.power, mx.np.multiply, mx.np.add, mx.np.true_divide]
+        mx_ops = [mx.sym.np.power, mx.sym.np.multiply, mx.sym.np.add, mx.sym.np.true_divide]
+        for ref_op, mx_op in zip(ref_ops, mx_ops):
+            data_np1 = np.random.uniform(size=data_shape).astype("float32")
+            data1 = mx.sym.var('lhs')
+            ref_res = ref_op(mx.np.array(data_np1), scalar)
+            mx_sym = mx_op(data1.as_np_ndarray(), scalar)
+            mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape}, dtype="float32")
+            for target, ctx in ctx_list():
+                for kind in ["graph", "vm", "debug"]:
+                    intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
+                    op_res = intrp.evaluate()(data_np1)
+                    tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5)
+
+    verify(data_shape=(2, 2), scalar=1.0)
+    verify(data_shape=(2, 4), scalar=2.0)
+    verify(data_shape=(1, 3, 2), scalar=3.0)
+    verify(data_shape=(1, 3, 3), scalar=4.0)
+
+
+def test_forward_npi_tanh():
+    def verify(data_shape):
+        data_np1 = np.random.uniform(size=data_shape).astype("float32")
+        data1 = mx.sym.var('data')
+        ref_res = mx.np.tanh(mx.np.array(data_np1))
+        mx_sym = mx.sym.np.tanh(data1.as_np_ndarray())
+        mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data": data_shape}, dtype="float32")
+        for target, ctx in ctx_list():
+            for kind in ["graph", "vm", "debug"]:
+                intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
+                op_res = intrp.evaluate()(data_np1)
+                tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5)
+
+    verify(data_shape=(2, 2))
+    verify(data_shape=(2, 4))
+    verify(data_shape=(1, 3, 2))
+    verify(data_shape=(1, 3, 3))
+
+
+def test_forward_npi_where_rscalar():
+    if not hasattr(mx.np, 'where'):
+        pytest.skip("mx.np.where hasn't been publish yet")
+
+    def verify(data_shape, scalar):
+        cond_np = np.random.uniform(size=data_shape).astype("bool")
+        data_np = np.random.uniform(size=data_shape).astype("float32")

Review comment:
       We may need to test multiple dtypes here; in particular, `cond_np` can be either `bool` or `float32`.
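
       For instance, the helper could take the condition dtype as a parameter and the test could loop over both. A minimal sketch in the style of the other tests in this file; the `verify` signature, the input names `condition`/`x`, and the scalar form of `mx.sym.np.where` are illustrative assumptions on my side, since the rest of the function body is not visible in this hunk:

       ```python
       # Hypothetical sketch, not the PR's code: parameterize the condition
       # dtype so both boolean and floating-point masks are exercised.
       def verify(data_shape, scalar, cond_dtype):
           cond_np = np.random.uniform(size=data_shape).astype(cond_dtype)
           data_np = np.random.uniform(size=data_shape).astype("float32")
           cond = mx.sym.var('condition')
           data = mx.sym.var('x')
           ref_res = mx.np.where(mx.np.array(cond_np), mx.np.array(data_np), scalar)
           mx_sym = mx.sym.np.where(cond.as_np_ndarray(), data.as_np_ndarray(), scalar)
           mod, _ = relay.frontend.from_mxnet(
               mx_sym,
               shape={"condition": data_shape, "x": data_shape},
               dtype={"condition": cond_dtype, "x": "float32"})
           for target, ctx in ctx_list():
               for kind in ["graph", "vm", "debug"]:
                   intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
                   op_res = intrp.evaluate()(cond_np, data_np)
                   tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5)

       for cond_dtype in ["bool", "float32"]:
           verify(data_shape=(2, 2), scalar=1.0, cond_dtype=cond_dtype)
       ```

       Whether the boolean input path works end to end through the frontend is exactly what the extra dtype coverage would catch.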




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org