You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@tvm.apache.org by li...@apache.org on 2020/09/07 03:40:58 UTC

[incubator-tvm] branch master updated: iadd conv2d_transpose alter layout (#6358)

This is an automated email from the ASF dual-hosted git repository.

liuyizhi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-tvm.git


The following commit(s) were added to refs/heads/master by this push:
     new 4b48d89  iadd conv2d_transpose alter layout (#6358)
4b48d89 is described below

commit 4b48d89c79a72f7799606e845bdb1ed938baa115
Author: Beya2019 <49...@users.noreply.github.com>
AuthorDate: Mon Sep 7 11:40:42 2020 +0800

    iadd conv2d_transpose alter layout (#6358)
    
    Co-authored-by: honghua.cao <ho...@streamcomputing.com>
---
 python/tvm/relay/op/nn/_nn.py                     | 43 +++++++++++++++++
 tests/python/relay/test_pass_convert_op_layout.py | 58 ++++++++++++++++++++++-
 2 files changed, 100 insertions(+), 1 deletion(-)

diff --git a/python/tvm/relay/op/nn/_nn.py b/python/tvm/relay/op/nn/_nn.py
index 2f0966c..43fca6d 100644
--- a/python/tvm/relay/op/nn/_nn.py
+++ b/python/tvm/relay/op/nn/_nn.py
@@ -192,6 +192,49 @@ def legalize_conv2d_transpose(attrs, inputs, types):
     """
     return topi.nn.conv2d_transpose_legalize(attrs, inputs, types)
 
+@reg.register_convert_op_layout("nn.conv2d_transpose")
+def convert_conv2d_transpose(attrs, inputs, tinfos, desired_layouts):
+    """Convert Layout pass registration for conv2d_transpose op.
+
+    Parameters
+    ----------
+    attrs : tvm.ir.Attrs
+        Attributes of current convolution
+    inputs : list of tvm.relay.Expr
+        The args of the Relay expr to be legalized
+    tinfos : list of types
+        List of input and output types
+    desired_layouts : list of layout strings
+        List of layouts defining our desired
+        layout for the data and kernel inputs respectively.
+
+    Returns
+    -------
+    result : tvm.relay.Expr
+        The transformed expr
+    """
+    # pylint: disable=import-outside-toplevel
+    from tvm import relay
+    data, weight = inputs
+    new_attrs = dict(attrs)
+    assert len(desired_layouts) == 2, \
+        "A desired layout is expected for both of nn.conv2d_transpose's inputs"
+    desired_data_layout, desired_kernel_layout = map(str, desired_layouts)
+    assert desired_data_layout != "default", "Data layout cannot be default"
+    new_attrs['data_layout'] = desired_data_layout
+
+    if desired_kernel_layout != "default":
+        new_attrs['kernel_layout'] = desired_kernel_layout
+        return relay.nn.conv2d_transpose(data, weight, **new_attrs)
+
+    # Handle default kernel layouts
+    if desired_data_layout == 'NCHW':
+        new_attrs['kernel_layout'] = 'OIHW'
+        return relay.nn.conv2d_transpose(data, weight, **new_attrs)
+    elif desired_data_layout == 'NHWC':
+        new_attrs['kernel_layout'] = 'HWIO'
+        return relay.nn.conv2d_transpose(data, weight, **new_attrs)
+
+    raise ValueError("Layout %s is not yet supported." % desired_data_layout)
 
 # conv3d_transpose
 reg.register_strategy("nn.conv3d_transpose", strategy.conv3d_transpose_strategy)
diff --git a/tests/python/relay/test_pass_convert_op_layout.py b/tests/python/relay/test_pass_convert_op_layout.py
index f3cdbfc..aec758d 100644
--- a/tests/python/relay/test_pass_convert_op_layout.py
+++ b/tests/python/relay/test_pass_convert_op_layout.py
@@ -90,6 +90,41 @@ def test_conv_convert_layout():
     assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
 
 
+def test_conv_transpose_convert_layout():
+    def before():
+        x = relay.var("x", shape=(1, 56, 56, 64))
+        weight = relay.var('weight', shape=(3, 3, 64, 64))
+        y = relay.nn.conv2d_transpose(x, weight,
+                            channels=64,
+                            kernel_size=(3, 3),
+                            padding=(1, 1),
+                            data_layout='NHWC',
+                            kernel_layout='HWIO')
+        y = relay.nn.relu(y)
+        y = relay.Function([x, weight], y)
+        return y
+
+    def expected():
+        x = relay.var("x", shape=(1, 56, 56, 64))
+        weight = relay.var('weight', shape=(3, 3, 64, 64))
+        x = relay.layout_transform(x, 'NHWC', 'NCHW')
+        weight = relay.layout_transform(weight, 'HWIO', 'OIHW')
+        y = relay.nn.conv2d_transpose(x, weight,
+                            channels=64,
+                            kernel_size=(3, 3),
+                            padding=(1, 1))
+        y = relay.nn.relu(y)
+        y = relay.layout_transform(y, 'NCHW', 'NHWC')
+        y = relay.Function(relay.analysis.free_vars(y), y)
+        return y
+
+    a = before()
+    a = run_opt_pass(a, transform.ConvertLayout({'nn.conv2d_transpose': ['NCHW', 'OIHW']}))
+    b = run_opt_pass(expected(), transform.InferType())
+
+    assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
+
+
 def test_conv_bias_pool_convert_layout():
     def before():
         x = relay.var("x", shape=(1, 56, 56, 64))
@@ -680,6 +715,7 @@ def test_different_ops_convert_layout():
         x = relay.var("x", shape=(1, 64, 56, 56))
         weight1 = relay.var("weight1", shape=(64, 3, 3, 64))
         weight2 = relay.var("weight2", shape=(64, 3, 3, 64), dtype='int8')
+        weight3 = relay.var("weight3", shape=(64, 3, 3, 64))
         out = relay.nn.conv2d(x, weight1,
                               channels=64,
                               kernel_size=(3, 3),
@@ -697,6 +733,13 @@ def test_different_ops_convert_layout():
                                   padding=(1, 1),
                                   data_layout='NCHW',
                                   kernel_layout='OHWI')
+        out = relay.cast(out, 'float32')
+        out = relay.nn.conv2d_transpose(out, weight3,
+                              channels=64,
+                              kernel_size=(3, 3),
+                              padding=(1, 1),
+                              data_layout='NCHW',
+                              kernel_layout='OHWI')
         out = relay.Function(analysis.free_vars(out), out)
         return out
 
@@ -704,6 +747,7 @@ def test_different_ops_convert_layout():
         x = relay.var("x", shape=(1, 64, 56, 56))
         weight1 = relay.var("weight1", shape=(64, 3, 3, 64))
         weight2 = relay.var("weight2", shape=(64, 3, 3, 64), dtype='int8')
+        weight3 = relay.var("weight3", shape=(64, 3, 3, 64))
         x = relay.layout_transform(x, 'NCHW', 'NHWC')
         weight1 = relay.layout_transform(weight1, 'OHWI', 'HWIO')
         out = relay.nn.conv2d(x, weight1,
@@ -725,12 +769,23 @@ def test_different_ops_convert_layout():
                                   padding=(1, 1),
                                   data_layout='NCHW',
                                   kernel_layout='OIHW')
+        out = relay.cast(out, 'float32')
+        out = relay.layout_transform(out, 'NCHW', 'NHWC')
+        weight3 = relay.layout_transform(weight3, 'OHWI', 'HWIO')
+        out = relay.nn.conv2d_transpose(out, weight3,
+                              channels=64,
+                              kernel_size=(3, 3),
+                              padding=(1, 1),
+                              data_layout='NHWC',
+                              kernel_layout='HWIO')
+        out = relay.layout_transform(out, 'NHWC', 'NCHW')
         out = relay.Function(analysis.free_vars(out), out)
         return out
 
     a = before()
     desired_layouts = {'nn.conv2d': ['NHWC', 'HWIO'],
-                       'qnn.conv2d': ['NCHW', 'OIHW']}
+                       'qnn.conv2d': ['NCHW', 'OIHW'],
+                       'nn.conv2d_transpose': ['NHWC', 'HWIO'],}
     a = run_opt_pass(a, transform.ConvertLayout(desired_layouts))
     b = run_opt_pass(expected(), transform.InferType())
 
@@ -751,5 +806,6 @@ if __name__ == "__main__":
     test_qnn_conv_concat_convert_layout()
     test_qnn_conv_add_convert_layout()
     test_conv_convert_kernel_layout()
+    test_conv_transpose_convert_layout()
     test_default_keyword()
     test_different_ops_convert_layout()