Posted to commits@tvm.apache.org by GitBox <gi...@apache.org> on 2019/12/13 05:42:07 UTC

[GitHub] [incubator-tvm] optima2005 commented on a change in pull request #4511: [CONV] Asymmetric padding

optima2005 commented on a change in pull request #4511: [CONV] Asymmetric padding
URL: https://github.com/apache/incubator-tvm/pull/4511#discussion_r357492814
 
 

 ##########
 File path: topi/tests/python/test_topi_conv2d_nchw.py
 ##########
 @@ -176,6 +184,20 @@ def test_conv2d_nchw():
     verify_conv2d_nchw(1,  512,   5, 126, 3, 1, 1)
     verify_conv2d_nchw(1,  256,   3, 126, 3, 1, 1)
 
+    # Asymmetric padding
+    verify_conv2d_nchw(1,   3, 224,  64, 7, 2, (0, 0, 1, 1))
+    verify_conv2d_nchw(1,  64,  56,  64, 3, 1, (3, 3, 2, 2))
+    verify_conv2d_nchw(1,  64,  56,  64, 1, 1, (1, 2, 2, 1))
+    verify_conv2d_nchw(1,  64,  56,  64, 1, 1, (1, 2))
+    verify_conv2d_nchw(1,  64,  56,  64, 3, 1, (3, 1))
+    verify_conv2d_nchw(1,  64,  56,  64, 3, 1, (0, 2))
+    verify_conv2d_nchw(1,  64,  56,  64, 3, 1, (1, 2), use_cudnn=True)
+    verify_conv2d_nchw(1,  64,  56,  64, 1, 1, "VALID")
+    verify_conv2d_nchw(1,  64,  56,  64, 3, 1, "VALID")
+    verify_conv2d_nchw(1,  64,  56,  64, 3, 1, "VALID", use_cudnn=True)
 +    # Currently not working
+    #verify_conv2d_nchw(1,  64,  56,  64, 1, 1, "SAME")
 
 Review comment:
  @FrozenGene I got the error below when enabling this test, could you help? Thanks!
   ```
   >       verify_conv2d_nchw(1,  64,  56,  64, 1, 1, "SAME")
   
   topi/tests/python/test_topi_conv2d_nchw.py:196:
   _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
   topi/tests/python/test_topi_conv2d_nchw.py:91: in verify_conv2d_nchw
       check_device(device)
   topi/tests/python/test_topi_conv2d_nchw.py:70: in check_device
       (dilation, dilation), layout='NCHW', out_dtype=dtype)
   </host/root/ligc/tmp/conda/envs/myenv/lib/python3.5/site-packages/decorator.py:decorator-gen-34>:2: in conv2d
       ???
   python/tvm/target.py:382: in dispatch_func
       return dispatch_dict[k](*args, **kwargs)
   </host/root/ligc/tmp/conda/envs/myenv/lib/python3.5/site-packages/decorator.py:decorator-gen-153>:2: in config_dispatcher
       ???
   python/tvm/autotvm/task/dispatcher.py:216: in dispatch_func
       return dispatch_dict['direct'](cfg, *args, **kwargs)
   python/tvm/autotvm/task/topi_integration.py:385: in template_call
       node = f(cfg, *args, **kwargs)
   topi/python/topi/x86/conv2d.py:112: in _declaration_conv
       _get_default_config(cfg, data, kernel, strides, padding, out_dtype)
   topi/python/topi/x86/conv2d.py:57: in _get_default_config
       wkl = _get_conv2d_workload(data, kernel, strides, padding, out_dtype, layout)
   topi/python/topi/nn/conv2d.py:158: in _get_workload
       HPAD, WPAD, _, _ = get_pad_tuple(padding, kernel)
   topi/python/topi/nn/util.py:119: in get_pad_tuple
       pad_h = kernel[0] - 1
   python/tvm/expr.py:63: in __sub__
       return _generic.subtract(self, other)
   topi/python/topi/generic_op_impl.py:83: in _tensor_bop_impl
       return orig_bop(lhs, rhs)
   python/tvm/generic.py:62: in subtract
       return _make._OpSub(lhs, rhs)
   python/tvm/_ffi/_ctypes/function.py:201: in __call__
       values, tcodes, num_args = _make_tvm_args(args, temp_args)
   python/tvm/_ffi/_ctypes/function.py:148: in _make_tvm_args
       arg = convert_to_node(arg)
   python/tvm/_ffi/node_generic.py:91: in convert_to_node
       return value.asnode()
   python/tvm/tensor.py:42: in asnode
       return self.tensor(*self.indices)
   _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
   
   self = Tensor(shape=[64, 64, 1, 1], op.name=W), indices = (0,), ndim = 4
   
       def __call__(self, *indices):
           ndim = self.ndim
           if len(indices) != ndim:
   >           raise ValueError("Need to provide %d index in tensor slice" % ndim)
   E           ValueError: Need to provide 4 index in tensor slice
   
   python/tvm/tensor.py:64: ValueError
   ---------------------------------------------------------------- Captured stdout call -----------------------------------------------------------------
   Workload: (1, 64, 56, 64, 1, 1, 0, 1)
   Running on target: llvm
   ------------------------------------------------------------------ Captured log call ------------------------------------------------------------------
   WARNING  autotvm:dispatcher.py:386 Cannot find config for target=llvm, workload=('conv2d', (1, 64, 56, 56, 'float32'), (64, 64, 1, 1, 'float32'), (1, 1), 'SAME', (1, 1), 'NCHW', 'float32'). A fallback configuration is used, which may bring great performance regression.
   =========================================================== 1 failed, 1 deselected in 1.43s ===========================================================
   
   ```
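  For reference, the traceback points at `topi/python/topi/nn/util.py:119`, where `get_pad_tuple` evaluates `kernel[0] - 1` in the "SAME" branch; the `kernel` it receives here is the 4-D weight Tensor (shape `[64, 64, 1, 1]`) rather than a `(kernel_h, kernel_w)` pair, so slicing it with a single index raises the error. Below is a minimal sketch of the mismatch and one possible workaround (illustrative only, assuming an OIHW kernel layout and the topi API from this traceback; not necessarily how this PR ends up fixing it):
  ```
  # Sketch only: illustrate the type mismatch behind the "SAME" failure and one
  # way around it, using the 2019-era TVM/topi Python API seen in the traceback.
  import tvm
  from topi.util import get_const_int
  from topi.nn.util import get_pad_tuple

  data = tvm.placeholder((1, 64, 56, 56), name="data")
  kernel = tvm.placeholder((64, 64, 1, 1), name="W")

  # What the traceback shows: the 4-D kernel Tensor is handed to get_pad_tuple,
  # so the "SAME" branch computes kernel[0] - 1 on a one-index tensor slice and
  # raises "Need to provide 4 index in tensor slice".
  # get_pad_tuple("SAME", kernel)   # ValueError

  # Possible workaround: pass the kernel's spatial dims instead (OIHW assumed).
  kh = get_const_int(kernel.shape[2])
  kw = get_const_int(kernel.shape[3])
  pad_top, pad_left, pad_down, pad_right = get_pad_tuple("SAME", (kh, kw))
  ```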
