You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@tvm.apache.org by GitBox <gi...@apache.org> on 2020/01/30 19:05:12 UTC

[GitHub] [incubator-tvm] icemelon9 commented on a change in pull request #4644: [WIP] Relay op strategy

icemelon9 commented on a change in pull request #4644: [WIP] Relay op strategy
URL: https://github.com/apache/incubator-tvm/pull/4644#discussion_r373136444
 
 

 ##########
 File path: python/tvm/relay/backend/compile_engine.py
 ##########
 @@ -63,6 +83,316 @@ def _get_cache_key(source_func, target):
     return source_func
 
 
def get_shape(shape):
    """Convert the shape to correct dtype and vars.

    Each constant dimension is re-created as an int32 ``IntImm`` (after
    checking it fits in int32), each ``Any`` dimension is replaced by a
    fresh int32 symbolic var, and anything else is passed through as-is.

    Parameters
    ----------
    shape : list
        The input shape dimensions.

    Returns
    -------
    ret : list
        The normalized shape dimensions.
    """
    def _normalize(dim):
        # Constant dim: clamp-check against int32 and rebuild with int32 dtype.
        if isinstance(dim, tvm.expr.IntImm):
            size = int(dim)
            assert size <= np.iinfo(np.int32).max
            return tvm.expr.IntImm("int32", size)
        # Symbolic (unknown) dim: stand in a fresh int32 variable.
        if isinstance(dim, tvm.expr.Any):
            return tvm.var("any_dim", "int32")
        return dim

    return [_normalize(dim) for dim in shape]
+
+
def get_valid_implements(op, attrs, inputs, out_type, target):
    """Get all valid implementations from the op strategy.

    Note that this function doesn't support op that has symbolic input shapes.

    Parameters
    ----------
    op : relay.op.Op
        Relay operator.

    attrs : object
        The op attribute.

    inputs : list of tvm.Tensor
        Input tensors to the op.

    out_type : relay.Type
        The output type.

    target : tvm.Target
        The target to compile the op.

    Returns
    -------
    ret : list of relay.op.OpImplement
        The list of op implementations.
    """
    fstrategy = op.get_attr("FTVMStrategy")
    assert fstrategy is not None, "%s doesn't have FTVMStrategy registered" % op.name
    # The strategy function must be invoked with the target in scope.
    with target:
        strategy = fstrategy(attrs, inputs, out_type, target)

    def _clause_holds(clause):
        # A clause counts as satisfied only if it simplifies to a true constant.
        simplified = tvm.ir_pass.Simplify(clause)
        return isinstance(simplified, tvm.expr.IntImm) and simplified.value

    ret = []
    for spec in strategy.specializations:
        # Unconditional specializations always apply; conditional ones apply
        # only when every clause of the condition holds.
        if not spec.condition or all(_clause_holds(c) for c in spec.condition.clauses):
            ret.extend(spec.implements)
    return ret
+
+
+def select_implement(op, attrs, inputs, out_type, target, use_autotvm=True):
+    """Select the best implement from the op strategy.
+
+    If use_autotvm is True, it'll first try to find the best implementation
+    based on AutoTVM profile results. If no AutoTVM profile result is found,
+    it'll choose the implementation with highest plevel.
+
+    If use_autotvm is False, it'll directly choose the implementation with
+    highest plevel.
+
+    Note that this function doesn't support op that has symbolic input shapes.
+
+    Parameters
+    ----------
+    op : relay.op.Op
+        Relay operator.
+
+    attrs : object
+        The op attribute.
+
+    inputs : list[tvm.Tensor]
+        Input tensors to the op.
+
+    out_type : relay.Type
+        The output type.
+
+    target : tvm.Target
+        The target to compile the op.
+
+    use_autotvm : bool
+        Whether query AutoTVM to pick the best.
+
+    Returns
+    -------
+    ret : tuple(relay.op.OpImplement, list[tvm.Tensor])
+        The best op implementation and the corresponding output tensors.
+    """
+    all_impls = get_valid_implements(op, attrs, inputs, out_type, target)
+
+    best_plevel_impl = None
+    for impl in all_impls:
+        if best_plevel_impl is None or int(impl.plevel) > int(best_plevel_impl.plevel):
 
 Review comment:
   Because `plevel` is an `IntImm`, comparing two of them directly builds a TVM expression rather than evaluating to a Python boolean — hence the explicit `int(...)` casts before comparison.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services