Posted to commits@tvm.apache.org by an...@apache.org on 2022/09/17 00:01:22 UTC

[tvm] 04/18: optional cast

This is an automated email from the ASF dual-hosted git repository.

andrewzhaoluo pushed a commit to branch aluo/rebase-09162022-autotensorization
in repository https://gitbox.apache.org/repos/asf/tvm.git

commit 0e35324ab79f504e5921cf135b0cf0a57768a044
Author: Andrew Zhao Luo <an...@gmail.com>
AuthorDate: Fri Sep 2 12:37:50 2022 -0700

    optional cast
---
 python/tvm/relay/op/contrib/dnnl.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/tvm/relay/op/contrib/dnnl.py b/python/tvm/relay/op/contrib/dnnl.py
index e27449ac43..67909b04b8 100644
--- a/python/tvm/relay/op/contrib/dnnl.py
+++ b/python/tvm/relay/op/contrib/dnnl.py
@@ -831,7 +831,7 @@ class LayerNormRewritePattern1(DFPatternCallback):
         self.beta = wildcard()
         mu = is_op("mean")(self.data)
         diff = is_op("subtract")(self.data, mu)
-        cdiff = is_op("cast")(diff)
+        cdiff = is_op("cast")(diff) | diff  # the cast is not always present, so match with or without it
         const_two = (
             is_expr(relay.const(2))
             | is_expr(relay.const(2.0))
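For context, the change relies on the `|` alternative operator in TVM's Relay dataflow pattern language: `is_op("cast")(diff) | diff` matches the subtract result whether or not a cast is applied to it. Below is a minimal, self-contained sketch of that behavior; it mirrors the pattern fragment in the diff but is illustrative only, not the full LayerNormRewritePattern1 callback.

from tvm import relay
from tvm.relay.dataflow_pattern import is_op, wildcard

data = wildcard()
mu = is_op("mean")(data)
diff = is_op("subtract")(data, mu)
# The alternative pattern accepts either cast(subtract(...)) or the bare subtract(...).
cdiff = is_op("cast")(diff) | diff

# Hypothetical graphs used only to exercise the pattern.
x = relay.var("x", shape=(1, 8), dtype="float32")
sub = relay.subtract(x, relay.mean(x, axis=-1, keepdims=True))

assert cdiff.match(relay.cast(sub, "float32"))  # graph with the optional cast
assert cdiff.match(sub)                         # graph without the cast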