Posted to commits@tvm.apache.org by GitBox <gi...@apache.org> on 2020/04/08 21:15:18 UTC

[GitHub] [incubator-tvm] zhiics commented on a change in pull request #5272: [BYOC] Add example of Composite + Annotate for DNNL fused op
URL: https://github.com/apache/incubator-tvm/pull/5272#discussion_r405815328
 
 

 ##########
 File path: tests/python/relay/test_pass_partition_graph.py
 ##########
 @@ -856,6 +857,111 @@ def expected():
     partitioned = transform.PartitionGraph()(mod)
     assert tvm.ir.structural_equal(partitioned, ref_mod, map_free_vars=True)
 
+
+def test_partition_conv_bias_relu():
+    def make_pattern():
+        data = relay.var("data", relay.TensorType((1, 3, 224, 224), "float32"))
+        weight = relay.var("weight")
+        bias = relay.var("bias")
+        conv = relay.nn.conv2d(data=data, weight=weight, kernel_size=(3, 3),
+                               channels=8, padding=(1, 1))
+        add = relay.add(conv, bias)
+        return relay.nn.relu(add)
+
+    def get_blocks(prefix, data, in_channel, out_channel,
+                   include_bn=True, include_sigmoid=False):
+        weight = relay.var(prefix + "weight")
+        bn_gamma = relay.var(prefix + "bn_gamma")
+        bn_beta = relay.var(prefix + "bn_beta")
+        bn_mmean = relay.var(prefix + "bn_mean")
+        bn_mvar = relay.var(prefix + "bn_var")
+
+        layer = relay.nn.conv2d(data=data, weight=weight, kernel_size=(3, 3),
+                                channels=out_channel, padding=(1, 1))
+        if include_bn:
+            bn_output = relay.nn.batch_norm(layer, bn_gamma, bn_beta,
+                                            bn_mmean, bn_mvar)
+            layer = bn_output[0]
+        if include_sigmoid:
+            # dummy layer to prevent pattern detection
+            layer = relay.sigmoid(layer)
+        layer = relay.nn.relu(layer)
+        return layer
+
+    def get_net(include_bn=True, include_sigmoid=False):
+        data = relay.var("data", relay.TensorType((1, 3, 224, 224), "float32"))
+        layer1 = get_blocks("layer1_", data, 3, 8, include_bn, include_sigmoid)
+        layer2 = get_blocks("layer2_", layer1, 8, 8, include_bn, include_sigmoid)
+        return relay.Function(relay.analysis.free_vars(layer2), layer2)
+
 +    def get_partitioned_mod(mod, params):
+        # This is required for constant folding
+        mod["main"] = bind_params_by_name(mod["main"], params)
+        pattern_table = [
+            ("dnnl.conv_bias_relu", make_pattern())
 
 Review comment:
  Yes, some common patterns could be moved to dnnl.py. Let's keep this one here for now and do the move in a separate PR.
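
  As a rough illustration (not code from this PR), hoisting the pattern into
  python/tvm/relay/op/contrib/dnnl.py might look like the sketch below. The
  helper names make_conv_bias_relu_pattern and partition_for_dnnl are
  hypothetical; the pattern itself mirrors make_pattern from the test above,
  and the pass sequence is the usual BYOC flow
  (MergeComposite -> AnnotateTarget -> PartitionGraph).

      import tvm
      from tvm import relay

      def make_conv_bias_relu_pattern():
          """Relay expression matching conv2d -> add -> relu, mirroring
          make_pattern in the test above."""
          data = relay.var("data", relay.TensorType((1, 3, 224, 224), "float32"))
          weight = relay.var("weight")
          bias = relay.var("bias")
          conv = relay.nn.conv2d(data=data, weight=weight, kernel_size=(3, 3),
                                 channels=8, padding=(1, 1))
          return relay.nn.relu(relay.add(conv, bias))

      # Composite name -> pattern; consumed by MergeComposite below.
      pattern_table = [
          ("dnnl.conv_bias_relu", make_conv_bias_relu_pattern()),
      ]

      def partition_for_dnnl(mod):
          # Fuse matched subgraphs into composite functions, tag them for
          # the "dnnl" codegen, then split them out of the main function.
          seq = tvm.transform.Sequential([
              relay.transform.MergeComposite(pattern_table),
              relay.transform.AnnotateTarget("dnnl"),
              relay.transform.PartitionGraph(),
          ])
          return seq(mod)

  Keeping the table next to the codegen would let every frontend and test
  reuse the same fused-op definitions instead of redefining them locally.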

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services