Posted to commits@tvm.apache.org by GitBox <gi...@apache.org> on 2021/02/02 20:55:06 UTC

[GitHub] [tvm] codeislife99 commented on a change in pull request #7375: Support negative pad values

codeislife99 commented on a change in pull request #7375:
URL: https://github.com/apache/tvm/pull/7375#discussion_r568923480



##########
File path: tests/python/relay/test_op_level2.py
##########
@@ -1171,35 +1171,62 @@ def test_flatten_infer_type():
 
 @tvm.testing.uses_gpu
 def test_pad_infer_type():
-    # entirely concrete case
+    # entirely concrete cases
     n, c, h, w = 1, 2, 3, 4
     t = relay.var("t", relay.TensorType((n, c, h, w), "float32"))
     y = relay.nn.pad(t, ((1, 1), (2, 2), (3, 3), (4, 4)))
-    "pad_width=" in y.astext()
     yy = run_infer_type(y)
     assert yy.checked_type == relay.TensorType((3, 6, 9, 12), "float32")
 
+    n, c, h, w = 4, 6, 3, 5
+    t = relay.var("t", relay.TensorType((n, c, h, w), "float32"))
+    y = relay.nn.pad(t, ((-1, -1), (2, -2), (0, -3), (4, 4)), pad_mode="reflect")
+    yy = run_infer_type(y)
+    assert yy.checked_type == relay.TensorType((2, 6, 0, 13), "float32")
+
     # some symbolic values
     n, c, h, w = te.size_var("n"), 2, 3, te.size_var("w")
     t = relay.var("t", relay.TensorType((n, c, h, w), "float32"))
     y = relay.nn.pad(t, ((1, 1), (2, 2), (3, 3), (4, 4)))
     yy = run_infer_type(y)
     assert yy.checked_type == relay.TensorType((n + 2, 6, 9, w + 8), "float32")
 
+    n, c, h, w = te.size_var("n"), te.size_var("c"), te.size_var("h"), te.size_var("w")
+    t = relay.var("t", relay.TensorType((n, c, h, w), "float32"))
+    y = relay.nn.pad(t, ((-1, -1), (-2, -2), (1, -3), (4, 4)))
+    yy = run_infer_type(y)
+    assert yy.checked_type == relay.TensorType((n + (-2), c + (-4), h + (-2), w + 8), "float32")
+
 
 @tvm.testing.uses_gpu
 def test_pad_run():
     def _test_run(dtype):
-        dshape = (4, 10, 7, 7)
-        x = relay.var("x", shape=dshape)
-        y = relay.nn.pad(x, ((1, 1), (2, 2), (3, 3), (4, 4)))
-        func = relay.Function([x], y)
-        data = np.random.uniform(size=dshape).astype(dtype)
-        ref_res = np.pad(data, ((1, 1), (2, 2), (3, 3), (4, 4)), "constant")
-        for target, ctx in tvm.testing.enabled_targets():
-            intrp1 = relay.create_executor("graph", ctx=ctx, target=target)
-            op_res1 = intrp1.evaluate(func)(data)
-            tvm.testing.assert_allclose(op_res1.asnumpy(), ref_res, rtol=1e-5, atol=1e-5)
+        dshape_list = [(4, 10, 7, 7), (4, 6, 3, 5)]

Review comment:
       That's a good point, resolved!
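
       For readers following along, here is a minimal sketch (not part of the PR, and `padded_shape` is
       a hypothetical helper, not a TVM API) of the shape arithmetic the new assertions rely on: each
       output dimension is in_dim + pad_before + pad_after, so a negative pad width shrinks (crops)
       that dimension.

           # Sketch of the shape rule used by the new test cases; assumes only numpy.
           import numpy as np

           def padded_shape(shape, pad_width):
               # out_dim = in_dim + pad_before + pad_after (negative values crop)
               return tuple(d + lo + hi for d, (lo, hi) in zip(shape, pad_width))

           # Concrete case from the test above: (4, 6, 3, 5) with mixed-sign pads.
           assert padded_shape((4, 6, 3, 5), ((-1, -1), (2, -2), (0, -3), (4, 4))) == (2, 6, 0, 13)

           # Cross-check the cropping interpretation with numpy slicing: a pad of -1 on
           # both sides of the first axis removes one element from each end.
           data = np.zeros((4, 6, 3, 5))
           cropped = data[1:-1, :, :, :]  # pad widths ((-1, -1), (0, 0), (0, 0), (0, 0))
           assert cropped.shape == padded_shape(data.shape, ((-1, -1), (0, 0), (0, 0), (0, 0)))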




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org