You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@tvm.apache.org by ma...@apache.org on 2020/09/16 08:55:31 UTC

[incubator-tvm] branch master updated: [BUG][ConvertLayout] Fix qnn.conv2d layout conversion too many values to unpack (#6442)

This is an automated email from the ASF dual-hosted git repository.

masahi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-tvm.git


The following commit(s) were added to refs/heads/master by this push:
     new bdfefbb  [BUG][ConvertLayout] Fix qnn.conv2d layout conversion too many values to unpack (#6442)
bdfefbb is described below

commit bdfefbb03f5aab96ee677ee28a166dd6ab5dbf3f
Author: lhutton1 <35...@users.noreply.github.com>
AuthorDate: Wed Sep 16 09:55:13 2020 +0100

    [BUG][ConvertLayout] Fix qnn.conv2d layout conversion too many values to unpack (#6442)
    
    This patch follows a previous bugfix in #6419. That fix contained a simple oversight for qnn.conv2d: tinfos also contains the qnn parameters, not just the data and weight tensors. Therefore, we need to extract data_info and weight_info differently (by index, rather than unpacking the whole tuple).
    
    Change-Id: Ib0ad01f427543371380d0bb604a77b5e0ec1103d
---
 python/tvm/relay/qnn/op/layout_conversions.py     |  3 +-
 tests/python/relay/test_pass_convert_op_layout.py | 46 +++++++++++++++++++++++
 2 files changed, 48 insertions(+), 1 deletion(-)

diff --git a/python/tvm/relay/qnn/op/layout_conversions.py b/python/tvm/relay/qnn/op/layout_conversions.py
index 4105172..a7c90da 100644
--- a/python/tvm/relay/qnn/op/layout_conversions.py
+++ b/python/tvm/relay/qnn/op/layout_conversions.py
@@ -63,7 +63,8 @@ def convert_qnn_conv2d(attrs, inputs, tinfos, desired_layouts):
         return relay.qnn.op.conv2d(*inputs, **new_attrs)
     if desired_data_layout == "NHWC":
         # Check for depthwise convolution.
-        data_info, weight_info = tinfos
+        data_info = tinfos[0]
+        weight_info = tinfos[1]
         if is_depthwise_conv2d(
             data_info.shape,
             attrs["data_layout"],
diff --git a/tests/python/relay/test_pass_convert_op_layout.py b/tests/python/relay/test_pass_convert_op_layout.py
index e4771a0..d2a1329 100644
--- a/tests/python/relay/test_pass_convert_op_layout.py
+++ b/tests/python/relay/test_pass_convert_op_layout.py
@@ -749,6 +749,51 @@ def test_qnn_conv_add_convert_layout():
     assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
 
 
+def test_qnn_conv_nhwc_convert_layout():
+    def before():
+        x = relay.var("x", shape=(1, 64, 56, 56), dtype='int8')
+        weight = relay.var('weight', shape=(64, 64, 3, 3), dtype='int8')
+        y = relay.qnn.op.conv2d(x, weight,
+                                relay.const(1, 'int32'),
+                                relay.const(1, 'int32'),
+                                relay.const(1, 'float32'),
+                                relay.const(1, 'float32'),
+                                channels=64,
+                                kernel_size=(3, 3),
+                                padding=(1, 1),
+                                data_layout='NCHW',
+                                kernel_layout='OIHW')
+        y = relay.nn.relu(y)
+        y = relay.Function([x, weight], y)
+        return y
+
+    def expected():
+        x = relay.var("x", shape=(1, 64, 56, 56), dtype='int8')
+        weight = relay.var('weight', shape=(64, 64, 3, 3), dtype='int8')
+        x = relay.layout_transform(x, 'NCHW', 'NHWC')
+        weight = relay.layout_transform(weight, 'OIHW', 'HWIO')
+        y = relay.qnn.op.conv2d(x, weight,
+                                relay.const(1, 'int32'),
+                                relay.const(1, 'int32'),
+                                relay.const(1, 'float32'),
+                                relay.const(1, 'float32'),
+                                channels=64,
+                                kernel_size=(3, 3),
+                                padding=(1, 1),
+                                data_layout="NHWC",
+                                kernel_layout="HWIO")
+        y = relay.nn.relu(y)
+        y = relay.layout_transform(y, 'NHWC', 'NCHW')
+        y = relay.Function(relay.analysis.free_vars(y), y)
+        return y
+
+    a = before()
+    a = run_opt_pass(a, transform.ConvertLayout({'qnn.conv2d': ['NHWC', 'default']}))
+    b = run_opt_pass(expected(), transform.InferType())
+
+    assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
+
+
 def test_conv_convert_kernel_layout():
     """ Check that convolution kernel layout is correctly transformed. """
 
@@ -951,6 +996,7 @@ if __name__ == "__main__":
     test_qnn_conv_requantize_convert_layout()
     test_qnn_conv_concat_convert_layout()
     test_qnn_conv_add_convert_layout()
+    test_qnn_conv_nhwc_convert_layout()
     test_conv_convert_kernel_layout()
     test_conv_transpose_convert_layout()
     test_default_keyword()