Posted to commits@tvm.apache.org by GitBox <gi...@apache.org> on 2021/03/17 17:19:06 UTC

[GitHub] [tvm] comaniac commented on a change in pull request #7677: [Relay][Training][Pass] Factor out first-order AD to a module pass, and add ConcretizeLike pass

comaniac commented on a change in pull request #7677:
URL: https://github.com/apache/tvm/pull/7677#discussion_r596210282



##########
File path: src/relay/transforms/concretize_like.cc
##########
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*!
+ * \file concretize_like.cc
+ * \brief Converts `*_like` operators to their explicit shape equivalent (e.g. `zeros_like(x, y)` to
+ * `zeros(x, y.shape)`), when the target shape is concrete. This removes unnecessary dependencies
+ * and can enable more opportunities for operator fusion.
+ */
+#include <tvm/relay/dataflow_matcher.h>
+#include <tvm/relay/transform.h>
+
+#include "pattern_utils.h"
+
+namespace tvm {
+namespace relay {
+
+class ConcretizeLikeRewrite {
+ public:
+  ConcretizeLikeRewrite() {
+    concrete_map_[Op::Get("reshape_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
+      return MakeReshape(data, shape);
+    };
+    concrete_map_[Op::Get("zeros_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
+      return MakeZeros(shape, dtype);
+    };
+    concrete_map_[Op::Get("ones_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
+      return MakeOnes(shape, dtype);
+    };
+    concrete_map_[Op::Get("collapse_sum_like")] = [](Expr data, Array<Integer> shape,
+                                                     DataType dtype) {
+      ICHECK_LE(shape.size(), std::numeric_limits<int64_t>::max());
+      static const Op& op = Op::Get("collapse_sum_to");
+      auto attrs = make_object<InitOpAttrs>();
+      auto cshape =
+          MakeConstantTensor(DataType::Int(32), {static_cast<int64_t>(shape.size())}, shape);
+      attrs->shape = shape;
+      return Call(op, {data, cshape}, Attrs(attrs));
+    };
+    concrete_map_[Op::Get("broadcast_to_like")] = [](Expr data, Array<Integer> shape,
+                                                     DataType dtype) {
+      return MakeBroadCastTo(data, shape);
+    };
+
+    for (const auto& pr : concrete_map_) {
+      if (!op_pat_.defined()) {
+        op_pat_ = IsExpr(pr.first);
+      } else {
+        op_pat_ = op_pat_ || IsExpr(pr.first);
+      }
+    }

Review comment:
   1. Could you elaborate on what this loop does? It seems to me that it unions all the patterns that match ops in `concrete_map_`, but I didn't find `op_pat_` being used elsewhere.
   2. The construction of `concrete_map_` could be static, so we should be able to move it out of the constructor (see the sketch below).
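
   For (2), a minimal sketch of one way this could look; `FConcretize`, `ConcretizeMap`, and `GetConcretizeMap` are illustrative names, not anything that exists in the codebase:

```cpp
// Illustrative sketch only: FConcretize and the exact map type are assumed to match the PR.
using FConcretize = std::function<Expr(Expr, Array<Integer>, DataType)>;
using ConcretizeMap = std::unordered_map<Op, FConcretize, ObjectPtrHash, ObjectPtrEqual>;

// Build the op -> rewrite-function table once and share it across pass invocations.
static const ConcretizeMap& GetConcretizeMap() {
  static const ConcretizeMap map = [] {
    ConcretizeMap m;
    m[Op::Get("zeros_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
      return MakeZeros(shape, dtype);
    };
    m[Op::Get("ones_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
      return MakeOnes(shape, dtype);
    };
    // ... the remaining *_like ops registered the same way ...
    return m;
  }();
  return map;
}
```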

##########
File path: src/relay/transforms/concretize_like.cc
##########
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*!
+ * \file concretize_like.cc
+ * \brief Converts `*_like` operators to their explicit shape equivalent (e.g. `zeros_like(x, y)` to
+ * `zeros(x, y.shape)`), when the target shape is concrete. This removes unnecessary dependencies
+ * and can enable more opportunities for operator fusion.
+ */
+#include <tvm/relay/dataflow_matcher.h>
+#include <tvm/relay/transform.h>
+
+#include "pattern_utils.h"
+
+namespace tvm {
+namespace relay {
+
+class ConcretizeLikeRewrite {
+ public:
+  ConcretizeLikeRewrite() {
+    concrete_map_[Op::Get("reshape_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
+      return MakeReshape(data, shape);
+    };
+    concrete_map_[Op::Get("zeros_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
+      return MakeZeros(shape, dtype);
+    };
+    concrete_map_[Op::Get("ones_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
+      return MakeOnes(shape, dtype);
+    };
+    concrete_map_[Op::Get("collapse_sum_like")] = [](Expr data, Array<Integer> shape,
+                                                     DataType dtype) {
+      ICHECK_LE(shape.size(), std::numeric_limits<int64_t>::max());
+      static const Op& op = Op::Get("collapse_sum_to");
+      auto attrs = make_object<InitOpAttrs>();
+      auto cshape =
+          MakeConstantTensor(DataType::Int(32), {static_cast<int64_t>(shape.size())}, shape);
+      attrs->shape = shape;
+      return Call(op, {data, cshape}, Attrs(attrs));
+    };
+    concrete_map_[Op::Get("broadcast_to_like")] = [](Expr data, Array<Integer> shape,
+                                                     DataType dtype) {
+      return MakeBroadCastTo(data, shape);
+    };
+
+    for (const auto& pr : concrete_map_) {
+      if (!op_pat_.defined()) {
+        op_pat_ = IsExpr(pr.first);
+      } else {
+        op_pat_ = op_pat_ || IsExpr(pr.first);
+      }
+    }
+
+    data_pat_ = IsWildcard();
+    like_pat_ = IsWildcard();
+    unary_like_pat_ = (IsOp("zeros_like") || IsOp("ones_like"))({like_pat_});
+    binary_like_pat_ = (IsOp("reshape_like") || IsOp("collapse_sum_like") ||
+                        IsOp("broadcast_to_like"))({data_pat_, like_pat_});
+  }
+
+  Expr Callback(const Expr& pre, const Expr& post,
+                const Map<DFPattern, Array<Expr>>& node_map) const {
+    // we will rewrite iff the like argument has fully concrete shape
+    const CallNode* call_node = post.as<CallNode>();
+    ICHECK(call_node);
+    const OpNode* op_node = call_node->op.as<OpNode>();
+    ICHECK(op_node);
+    const Op op_ref = GetRef<Op>(op_node);
+    ICHECK(concrete_map_.count(op_ref) > 0);
+
+    Expr like = node_map[like_pat_][0];
+
+    if (!like->checked_type_.defined()) {
+      // TODO(@altanh): maybe because of the input being rewritten?
+      return post;
+    }
+
+    // skip trying to support this for now (ironic, as I was the one who added the feature)
+    if (const auto* attrs = call_node->attrs.as<ReshapeLikeAttrs>()) {
+      if (attrs->lhs_begin != 0 || attrs->rhs_begin != 0 || attrs->lhs_end.defined() ||
+          attrs->rhs_end.defined()) {
+        return post;
+      }
+    }
+
+    CHECK(like->checked_type_.defined())
+        << "ConcretizeLike requires checked types to be populated, please run type inference";

Review comment:
   We already have the check at L88-90, so this check seems redundant.

##########
File path: src/relay/transforms/concretize_like.cc
##########
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*!
+ * \file concretize_like.cc
+ * \brief Converts `*_like` operators to their explicit shape equivalent (e.g. `zeros_like(x, y)` to
+ * `zeros(x, y.shape)`), when the target shape is concrete. This removes unnecessary dependencies
+ * and can enable more opportunities for operator fusion.
+ */
+#include <tvm/relay/dataflow_matcher.h>
+#include <tvm/relay/transform.h>
+
+#include "pattern_utils.h"
+
+namespace tvm {
+namespace relay {
+
+class ConcretizeLikeRewrite {
+ public:
+  ConcretizeLikeRewrite() {
+    concrete_map_[Op::Get("reshape_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
+      return MakeReshape(data, shape);
+    };
+    concrete_map_[Op::Get("zeros_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
+      return MakeZeros(shape, dtype);
+    };
+    concrete_map_[Op::Get("ones_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
+      return MakeOnes(shape, dtype);
+    };
+    concrete_map_[Op::Get("collapse_sum_like")] = [](Expr data, Array<Integer> shape,
+                                                     DataType dtype) {
+      ICHECK_LE(shape.size(), std::numeric_limits<int64_t>::max());
+      static const Op& op = Op::Get("collapse_sum_to");
+      auto attrs = make_object<InitOpAttrs>();
+      auto cshape =
+          MakeConstantTensor(DataType::Int(32), {static_cast<int64_t>(shape.size())}, shape);
+      attrs->shape = shape;
+      return Call(op, {data, cshape}, Attrs(attrs));
+    };
+    concrete_map_[Op::Get("broadcast_to_like")] = [](Expr data, Array<Integer> shape,
+                                                     DataType dtype) {
+      return MakeBroadCastTo(data, shape);
+    };
+
+    for (const auto& pr : concrete_map_) {
+      if (!op_pat_.defined()) {
+        op_pat_ = IsExpr(pr.first);
+      } else {
+        op_pat_ = op_pat_ || IsExpr(pr.first);
+      }
+    }
+
+    data_pat_ = IsWildcard();
+    like_pat_ = IsWildcard();
+    unary_like_pat_ = (IsOp("zeros_like") || IsOp("ones_like"))({like_pat_});
+    binary_like_pat_ = (IsOp("reshape_like") || IsOp("collapse_sum_like") ||
+                        IsOp("broadcast_to_like"))({data_pat_, like_pat_});
+  }
+
+  Expr Callback(const Expr& pre, const Expr& post,
+                const Map<DFPattern, Array<Expr>>& node_map) const {
+    // we will rewrite iff the like argument has fully concrete shape
+    const CallNode* call_node = post.as<CallNode>();
+    ICHECK(call_node);
+    const OpNode* op_node = call_node->op.as<OpNode>();
+    ICHECK(op_node);
+    const Op op_ref = GetRef<Op>(op_node);
+    ICHECK(concrete_map_.count(op_ref) > 0);
+
+    Expr like = node_map[like_pat_][0];
+
+    if (!like->checked_type_.defined()) {
+      // TODO(@altanh): maybe because of the input being rewritten?
+      return post;
+    }
+
+    // skip trying to support this for now (ironic, as I was the one who added the feature)
+    if (const auto* attrs = call_node->attrs.as<ReshapeLikeAttrs>()) {
+      if (attrs->lhs_begin != 0 || attrs->rhs_begin != 0 || attrs->lhs_end.defined() ||
+          attrs->rhs_end.defined()) {
+        return post;
+      }
+    }
+
+    CHECK(like->checked_type_.defined())
+        << "ConcretizeLike requires checked types to be populated, please run type inference";
+    const TensorTypeNode* like_ty = like->checked_type().as<TensorTypeNode>();
+    ICHECK(like_ty) << "got non-Tensor argument type " << PrettyPrint(like->checked_type());
+
+    Array<Integer> cshape;
+    for (const auto& dim : like_ty->shape) {
+      if (const auto* imm = dim.as<IntImmNode>()) {
+        cshape.push_back(Integer(GetRef<IntImm>(imm)));
+        continue;
+      }
+      return post;
+    }
+
+    if (call_node->args.size() == 2) {
+      return concrete_map_.at(op_ref)(node_map[data_pat_][0], cshape, like_ty->dtype);
+    }
+    return concrete_map_.at(op_ref)(Expr(), cshape, like_ty->dtype);

Review comment:
   It might be better to refer to the SimplifyExpr pass and separate the unary and binary ops. Then maybe we could have a base struct to hold the shareable logic (see the sketch below).
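
   Roughly what I have in mind; the class names and the hook signature here are just for illustration, not the actual SimplifyExpr classes:

```cpp
// Illustrative sketch: a shared base owns the common "like" pattern pieces and the
// concrete-shape extraction; per-op subclasses only supply the replacement call.
class ConcretizeLikeOpRewrite {
 public:
  virtual ~ConcretizeLikeOpRewrite() = default;
  // Build the replacement once the like argument's shape is known to be concrete.
  virtual Expr Concretize(Expr data, Array<Integer> shape, DataType dtype) const = 0;

 protected:
  DFPattern data_pat_{IsWildcard()};
  DFPattern like_pat_{IsWildcard()};
};

class ConcretizeZerosLikeRewrite : public ConcretizeLikeOpRewrite {
 public:
  Expr Concretize(Expr data, Array<Integer> shape, DataType dtype) const final {
    return MakeZeros(shape, dtype);
  }
};
```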

##########
File path: src/relay/transforms/concretize_like.cc
##########
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*!
+ * \file concretize_like.cc
+ * \brief Converts `*_like` operators to their explicit shape equivalent (e.g. `zeros_like(x, y)` to
+ * `zeros(x, y.shape)`), when the target shape is concrete. This removes unnecessary dependencies
+ * and can enable more opportunities for operator fusion.
+ */
+#include <tvm/relay/dataflow_matcher.h>
+#include <tvm/relay/transform.h>
+
+#include "pattern_utils.h"
+
+namespace tvm {
+namespace relay {
+
+class ConcretizeLikeRewrite {
+ public:
+  ConcretizeLikeRewrite() {
+    concrete_map_[Op::Get("reshape_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
+      return MakeReshape(data, shape);
+    };
+    concrete_map_[Op::Get("zeros_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
+      return MakeZeros(shape, dtype);
+    };
+    concrete_map_[Op::Get("ones_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
+      return MakeOnes(shape, dtype);
+    };
+    concrete_map_[Op::Get("collapse_sum_like")] = [](Expr data, Array<Integer> shape,
+                                                     DataType dtype) {
+      ICHECK_LE(shape.size(), std::numeric_limits<int64_t>::max());
+      static const Op& op = Op::Get("collapse_sum_to");
+      auto attrs = make_object<InitOpAttrs>();
+      auto cshape =
+          MakeConstantTensor(DataType::Int(32), {static_cast<int64_t>(shape.size())}, shape);
+      attrs->shape = shape;
+      return Call(op, {data, cshape}, Attrs(attrs));
+    };
+    concrete_map_[Op::Get("broadcast_to_like")] = [](Expr data, Array<Integer> shape,
+                                                     DataType dtype) {
+      return MakeBroadCastTo(data, shape);
+    };
+
+    for (const auto& pr : concrete_map_) {
+      if (!op_pat_.defined()) {
+        op_pat_ = IsExpr(pr.first);
+      } else {
+        op_pat_ = op_pat_ || IsExpr(pr.first);
+      }
+    }
+
+    data_pat_ = IsWildcard();
+    like_pat_ = IsWildcard();
+    unary_like_pat_ = (IsOp("zeros_like") || IsOp("ones_like"))({like_pat_});
+    binary_like_pat_ = (IsOp("reshape_like") || IsOp("collapse_sum_like") ||
+                        IsOp("broadcast_to_like"))({data_pat_, like_pat_});
+  }
+
+  Expr Callback(const Expr& pre, const Expr& post,
+                const Map<DFPattern, Array<Expr>>& node_map) const {
+    // we will rewrite iff the like argument has fully concrete shape
+    const CallNode* call_node = post.as<CallNode>();
+    ICHECK(call_node);
+    const OpNode* op_node = call_node->op.as<OpNode>();
+    ICHECK(op_node);
+    const Op op_ref = GetRef<Op>(op_node);
+    ICHECK(concrete_map_.count(op_ref) > 0);
+
+    Expr like = node_map[like_pat_][0];
+
+    if (!like->checked_type_.defined()) {
+      // TODO(@altanh): maybe because of the input being rewritten?
+      return post;
+    }
+
+    // skip trying to support this for now (ironic, as I was the one who added the feature)
+    if (const auto* attrs = call_node->attrs.as<ReshapeLikeAttrs>()) {
+      if (attrs->lhs_begin != 0 || attrs->rhs_begin != 0 || attrs->lhs_end.defined() ||
+          attrs->rhs_end.defined()) {
+        return post;
+      }
+    }

Review comment:
   This is too ad hoc: it means we may not concretize *_like ops in certain situations. Instead of special-casing the unified callback function, we should keep this logic in the op-specific functions. I can think of two ways to achieve this:
   1. Put the logic at the beginning of the `concrete_map_` functions and return the original op if it is not applicable.
   2. Construct another checker map that includes a checker function for each op, and invoke the corresponding checker function here (a sketch of this option follows).
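
   For (2), something like the following could work; the names and the checker signature are just for illustration:

```cpp
// Illustrative sketch: a per-op applicability check consulted before rewriting.
using FCheckConcretize = std::function<bool(const CallNode*)>;
using CheckerMap = std::unordered_map<Op, FCheckConcretize, ObjectPtrHash, ObjectPtrEqual>;

CheckerMap MakeCheckerMap() {
  CheckerMap checkers;
  checkers[Op::Get("reshape_like")] = [](const CallNode* call) {
    // Only the default attrs are supported; skip partial reshape_like calls.
    const auto* attrs = call->attrs.as<ReshapeLikeAttrs>();
    return attrs == nullptr || (attrs->lhs_begin == 0 && attrs->rhs_begin == 0 &&
                                !attrs->lhs_end.defined() && !attrs->rhs_end.defined());
  };
  return checkers;
}

// In Callback():
//   auto it = checker_map_.find(op_ref);
//   if (it != checker_map_.end() && !it->second(call_node)) return post;
```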

##########
File path: tests/python/relay/test_pass_concretize_like.py
##########
@@ -0,0 +1,108 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Tests for the ConcretizeLike pass."""
+import tvm
+import tvm.relay.testing
+from tvm import relay
+from tvm.relay.testing import run_infer_type
+
+
+def test_reshape_like():
+    data = relay.var("data", shape=(2, 3, 4), dtype="float32")
+    shape_like = relay.var("shape_like", shape=(6, 2, 2), dtype="float32")
+    f = relay.Function([data, shape_like], relay.reshape_like(data, shape_like))
+    f_expected = relay.Function([data, shape_like], relay.reshape(data, (6, 2, 2)))

Review comment:
   This exposes a question about the current implementation: after the ConcretizeLike pass we are left with unused arguments. If we no longer need the shape_like tensor, we should remove it from the function.

##########
File path: src/relay/transforms/concretize_like.cc
##########
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*!
+ * \file concretize_like.cc
+ * \brief Converts `*_like` operators to their explicit shape equivalent (e.g. `zeros_like(x, y)` to
+ * `zeros(x, y.shape)`), when the target shape is concrete. This removes unnecessary dependencies
+ * and can enable more opportunities for operator fusion.
+ */
+#include <tvm/relay/dataflow_matcher.h>
+#include <tvm/relay/transform.h>
+
+#include "pattern_utils.h"
+
+namespace tvm {
+namespace relay {
+
+class ConcretizeLikeRewrite {
+ public:
+  ConcretizeLikeRewrite() {
+    concrete_map_[Op::Get("reshape_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
+      return MakeReshape(data, shape);
+    };
+    concrete_map_[Op::Get("zeros_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
+      return MakeZeros(shape, dtype);
+    };
+    concrete_map_[Op::Get("ones_like")] = [](Expr data, Array<Integer> shape, DataType dtype) {
+      return MakeOnes(shape, dtype);
+    };
+    concrete_map_[Op::Get("collapse_sum_like")] = [](Expr data, Array<Integer> shape,
+                                                     DataType dtype) {
+      ICHECK_LE(shape.size(), std::numeric_limits<int64_t>::max());
+      static const Op& op = Op::Get("collapse_sum_to");
+      auto attrs = make_object<InitOpAttrs>();
+      auto cshape =
+          MakeConstantTensor(DataType::Int(32), {static_cast<int64_t>(shape.size())}, shape);
+      attrs->shape = shape;
+      return Call(op, {data, cshape}, Attrs(attrs));
+    };
+    concrete_map_[Op::Get("broadcast_to_like")] = [](Expr data, Array<Integer> shape,
+                                                     DataType dtype) {
+      return MakeBroadCastTo(data, shape);
+    };
+
+    for (const auto& pr : concrete_map_) {
+      if (!op_pat_.defined()) {
+        op_pat_ = IsExpr(pr.first);
+      } else {
+        op_pat_ = op_pat_ || IsExpr(pr.first);
+      }
+    }
+
+    data_pat_ = IsWildcard();
+    like_pat_ = IsWildcard();
+    unary_like_pat_ = (IsOp("zeros_like") || IsOp("ones_like"))({like_pat_});
+    binary_like_pat_ = (IsOp("reshape_like") || IsOp("collapse_sum_like") ||
+                        IsOp("broadcast_to_like"))({data_pat_, like_pat_});

Review comment:
   1. These patterns can also be defined statically.
   2. Now we have two places that specify the supported *_like ops. I feel we should define lists of "unary like" and "binary like" ops once and use them both in `concrete_map_` and here. As a result, we could use logic similar to L61-67 to construct the patterns (see the sketch below).
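
   For example (illustrative names only; `kUnaryLikeOps`, `kBinaryLikeOps`, and `UnionOpPattern` don't exist in the codebase):

```cpp
// Illustrative sketch: one source of truth for the supported *_like ops, reused both
// when building concrete_map_ and when constructing the patterns.
static const std::vector<std::string> kUnaryLikeOps = {"zeros_like", "ones_like"};
static const std::vector<std::string> kBinaryLikeOps = {"reshape_like", "collapse_sum_like",
                                                        "broadcast_to_like"};

// Union the op patterns, mirroring the constructor loop over concrete_map_.
DFPattern UnionOpPattern(const std::vector<std::string>& op_names) {
  DFPattern pat;
  for (const auto& name : op_names) {
    DFPattern op_pat = IsOp(name);
    pat = pat.defined() ? (pat || op_pat) : op_pat;
  }
  return pat;
}

// unary_like_pat_  = UnionOpPattern(kUnaryLikeOps)({like_pat_});
// binary_like_pat_ = UnionOpPattern(kBinaryLikeOps)({data_pat_, like_pat_});
```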

##########
File path: tests/python/relay/test_pass_concretize_like.py
##########
@@ -0,0 +1,108 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Tests for the ConcretizeLike pass."""
+import tvm
+import tvm.relay.testing
+from tvm import relay
+from tvm.relay.testing import run_infer_type
+
+
+def test_reshape_like():
+    data = relay.var("data", shape=(2, 3, 4), dtype="float32")
+    shape_like = relay.var("shape_like", shape=(6, 2, 2), dtype="float32")
+    f = relay.Function([data, shape_like], relay.reshape_like(data, shape_like))
+    f_expected = relay.Function([data, shape_like], relay.reshape(data, (6, 2, 2)))
+    f_expected = run_infer_type(f_expected)
+
+    mod = tvm.IRModule.from_expr(f)
+    mod_concrete = relay.transform.ConcretizeLike()(mod)
+    assert tvm.ir.structural_equal(mod_concrete["main"], f_expected)
+
+
+def test_zeros_like():
+    dtype = "int32"
+    shape_like = relay.var("shape_like", shape=(3, 4, 5), dtype=dtype)
+    f = relay.Function([shape_like], relay.zeros_like(shape_like))
+    f_expected = relay.Function([shape_like], relay.zeros((3, 4, 5), dtype))
+    f_expected = run_infer_type(f_expected)
+
+    mod = tvm.IRModule.from_expr(f)
+    mod_concrete = relay.transform.ConcretizeLike()(mod)
+    assert tvm.ir.structural_equal(mod_concrete["main"], f_expected)
+
+
+def test_ones_like():
+    dtype = "int32"
+    shape_like = relay.var("shape_like", shape=(3, 4, 5), dtype=dtype)
+    f = relay.Function([shape_like], relay.ones_like(shape_like))
+    f_expected = relay.Function([shape_like], relay.ones((3, 4, 5), dtype))
+    f_expected = run_infer_type(f_expected)
+
+    mod = tvm.IRModule.from_expr(f)
+    mod_concrete = relay.transform.ConcretizeLike()(mod)
+    assert tvm.ir.structural_equal(mod_concrete["main"], f_expected)
+
+
+def test_collapse_sum_like():
+    data = relay.var("data", shape=(3, 3, 3), dtype="float32")
+    shape_like = relay.var("shape_like", shape=(3,), dtype="float32")
+    f = relay.Function([data, shape_like], relay.collapse_sum_like(data, shape_like))
+    f_expected = relay.Function([data, shape_like], relay.collapse_sum_to(data, (3,)))
+    f_expected = run_infer_type(f_expected)
+
+    mod = tvm.IRModule.from_expr(f)
+    mod_concrete = relay.transform.ConcretizeLike()(mod)
+    assert tvm.ir.structural_equal(mod_concrete["main"], f_expected)
+
+
+def test_broadcast_to_like():
+    data = relay.var("data", shape=(3,), dtype="float32")
+    shape_like = relay.var("shape_like", shape=(3, 3, 3), dtype="float32")
+    f = relay.Function([data, shape_like], relay.broadcast_to_like(data, shape_like))
+    f_expected = relay.Function([data, shape_like], relay.broadcast_to(data, (3, 3, 3)))
+    f_expected = run_infer_type(f_expected)
+
+    mod = tvm.IRModule.from_expr(f)
+    mod_concrete = relay.transform.ConcretizeLike()(mod)
+    assert tvm.ir.structural_equal(mod_concrete["main"], f_expected)
+
+
+def test_multiple():
+    x = relay.var("x", shape=(2, 3), dtype="float32")
+    y = relay.var("x", shape=(3,), dtype="float32")
+    l = x + y
+
+    dl = relay.ones_like(l)
+    dx = relay.zeros_like(x)
+    dy = relay.zeros_like(y)
+    dx = dx + relay.collapse_sum_like(dl, dx)
+    dy = dy + relay.collapse_sum_like(dl, dy)
+    ret = relay.Tuple([dx, dy])
+    f = relay.Function([x, y], ret)
+
+    dl_c = relay.ones((2, 3), "float32")
+    dx_c = relay.zeros((2, 3), "float32")
+    dy_c = relay.zeros((3,), "float32")
+    dx_c = dx_c + relay.collapse_sum_to(dl_c, (2, 3))
+    dy_c = dy_c + relay.collapse_sum_to(dl_c, (3,))
+    ret_c = relay.Tuple([dx_c, dy_c])
+    f_expected = relay.Function([x, y], ret_c)
+    f_expected = run_infer_type(f_expected)
+
+    mod = tvm.IRModule.from_expr(f)
+    mod_concrete = relay.transform.ConcretizeLike()(mod)
+    assert tvm.ir.structural_equal(mod_concrete["main"], f_expected)

Review comment:
       Add `if __name__ == "__main__":`




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org