Posted to commits@tvm.apache.org by GitBox <gi...@apache.org> on 2021/11/23 10:55:19 UTC

[GitHub] [tvm] lhutton1 commented on a change in pull request #9547: [microNPU] Add the infrastructure for lookup table and TANH

lhutton1 commented on a change in pull request #9547:
URL: https://github.com/apache/tvm/pull/9547#discussion_r754663191



##########
File path: python/tvm/relay/backend/contrib/ethosu/codegen.py
##########
@@ -22,6 +22,109 @@
 from tvm.relay.backend.contrib.ethosu.legalize import LegalizeEthosU
 from tvm.relay.backend.contrib.ethosu import tir_to_cs_translator
 from tvm.relay.backend.contrib.ethosu import util
+from tvm.relay.expr_functor import ExprMutator
+from tvm.ir.transform import Pass
+
+# pylint: disable=unused-import
+from tvm.relay.backend.contrib.ethosu.op import op_attrs
+from tvm.relay.backend.contrib.ethosu import op
+
+
+class OptimizeLUTs(ExprMutator):
+    """A pass to merge an identity operator with a LUT based activation function with
+    a preceding operator provided that operator can do a table lookup for the activation
+    in the hardware"""
+
+    def __init__(self):
+        super().__init__()
+        self.lut_ops = {
+            "contrib.ethosu.conv2d": op.ethosu_conv2d,
+            "contrib.ethosu.depthwise_conv2d": op.ethosu_depthwise_conv2d,
+            "contrib.ethosu.pooling": op.ethosu_pooling,
+        }
+
+    def create_op_with_lut(self, call):
+        """Extract the parameters and attributes from the NPU operator and create
+        a new operator with LUT.
+        ----------

Review comment:
       Nit: the `Parameters` heading is missing, and I think we need blank-line spacing in these docstrings?
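
       For reference, a numpydoc-style docstring with the missing `Parameters` heading and blank-line spacing might look like the following sketch (same content as the quoted code, only the layout changes):

       ```python
       def create_op_with_lut(self, call):
           """Extract the parameters and attributes from the NPU operator and create
           a new operator with LUT.

           Parameters
           ----------
           call : tvm.relay.expr.Call
               The current call node being visited.

           Returns
           -------
           tvm.relay.expr.Call
               The new operator with LUT.
           """
       ```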

##########
File path: python/tvm/relay/backend/contrib/ethosu/codegen.py
##########
@@ -22,6 +22,109 @@
 from tvm.relay.backend.contrib.ethosu.legalize import LegalizeEthosU
 from tvm.relay.backend.contrib.ethosu import tir_to_cs_translator
 from tvm.relay.backend.contrib.ethosu import util
+from tvm.relay.expr_functor import ExprMutator
+from tvm.ir.transform import Pass
+
+# pylint: disable=unused-import
+from tvm.relay.backend.contrib.ethosu.op import op_attrs
+from tvm.relay.backend.contrib.ethosu import op
+
+
+class OptimizeLUTs(ExprMutator):
+    """A pass to merge an identity operator with a LUT based activation function with
+    a preceding operator provided that operator can do a table lookup for the activation
+    in the hardware"""
+
+    def __init__(self):
+        super().__init__()
+        self.lut_ops = {
+            "contrib.ethosu.conv2d": op.ethosu_conv2d,
+            "contrib.ethosu.depthwise_conv2d": op.ethosu_depthwise_conv2d,
+            "contrib.ethosu.pooling": op.ethosu_pooling,
+        }
+
+    def create_op_with_lut(self, call):
+        """Extract the parameters and attributes from the NPU operator and create
+        a new operator with LUT.
+        ----------
+        call : tvm.relay.expr.Call
+            The current call node being visited.
+        Returns
+        -------
+        tvm.relay.expr.Call
+            The new operator with LUT.
+        """
+        identity = call
+        ethosu_op = call.args[0]
+        lut = identity.args[1]
+        activation = identity.attrs.activation
+
+        new_attrs = dict(ethosu_op.attrs)
+        new_attrs["activation"] = activation
+
+        # Assume that LUT is always the last argument
+        new_args = [ethosu_op.args[n] for n in range(len(ethosu_op.args) - 1)]
+        new_args.append(lut)
+        assert ethosu_op.op.name in self.lut_ops.keys()
+
+        return self.lut_ops[ethosu_op.op.name](*new_args, **new_attrs)
+
+    def visit_call(self, call: tvm.relay.expr.Call) -> tvm.relay.expr.Call:
+        """Recursively visit call nodes in the input graph and if an ethosu.identity
+        operator with LUT is found and the preceding operator has a LUT attribute, create
+        a new NPU operator.
+        Parameters
+        ----------
+        call : tvm.relay.expr.Call
+            The current call node being visited.
+        Returns
+        -------
+        tvm.relay.expr.Call
+            The input call node in the case the current call node does
+            not refer to an Op. Else, a new call node with a new operator.
+        """
+        new_call = call
+        lut_activations = ["TANH", "LUT"]
+
+        if (
+            call.op.name == "contrib.ethosu.identity"
+            and call.attrs.activation in lut_activations
+            and isinstance(call.args[0], tvm.relay.expr.Call)
+        ):
+            producer_op = call.args[0]
+            # Check if the producer can do a LUT operation
+            if producer_op.op.name in self.lut_ops.keys():
+                # Check the producer doesn't already have a LUT
+                has_lut = producer_op.attrs.activation in lut_activations
+                if not has_lut:
+                    new_call = self.create_op_with_lut(call)
+
+        new_call = super().visit_call(new_call)
+
+        return new_call
+
+
+@relay.transform.function_pass(opt_level=1, name="LutOptimizer")
+class LUTsOptimizer(Pass):
+    """Register LutOptimizer as a relay pass."""

Review comment:
       Nit: the docstring should say `LUTsOptimizer`, and the same for `name="LUTsOptimizer"` above.
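
       Applied, the decorator and docstring would read something like this sketch (imports shown for completeness; the class body is unchanged):

       ```python
       from tvm import relay
       from tvm.ir.transform import Pass

       @relay.transform.function_pass(opt_level=1, name="LUTsOptimizer")
       class LUTsOptimizer(Pass):
           """Register LUTsOptimizer as a relay pass."""
       ```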

##########
File path: python/tvm/relay/backend/contrib/ethosu/codegen.py
##########
@@ -22,6 +22,109 @@
 from tvm.relay.backend.contrib.ethosu.legalize import LegalizeEthosU
 from tvm.relay.backend.contrib.ethosu import tir_to_cs_translator
 from tvm.relay.backend.contrib.ethosu import util
+from tvm.relay.expr_functor import ExprMutator
+from tvm.ir.transform import Pass
+
+# pylint: disable=unused-import
+from tvm.relay.backend.contrib.ethosu.op import op_attrs
+from tvm.relay.backend.contrib.ethosu import op
+
+
+class OptimizeLUTs(ExprMutator):
+    """A pass to merge an identity operator with a LUT based activation function with
+    a preceding operator provided that operator can do a table lookup for the activation
+    in the hardware"""
+
+    def __init__(self):
+        super().__init__()
+        self.lut_ops = {
+            "contrib.ethosu.conv2d": op.ethosu_conv2d,
+            "contrib.ethosu.depthwise_conv2d": op.ethosu_depthwise_conv2d,
+            "contrib.ethosu.pooling": op.ethosu_pooling,
+        }
+
+    def create_op_with_lut(self, call):
+        """Extract the parameters and attributes from the NPU operator and create
+        a new operator with LUT.
+        ----------
+        call : tvm.relay.expr.Call
+            The current call node being visited.
+        Returns
+        -------
+        tvm.relay.expr.Call
+            The new operator with LUT.
+        """
+        identity = call
+        ethosu_op = call.args[0]
+        lut = identity.args[1]
+        activation = identity.attrs.activation
+
+        new_attrs = dict(ethosu_op.attrs)
+        new_attrs["activation"] = activation
+
+        # Assume that LUT is always the last argument
+        new_args = [ethosu_op.args[n] for n in range(len(ethosu_op.args) - 1)]
+        new_args.append(lut)
+        assert ethosu_op.op.name in self.lut_ops.keys()
+
+        return self.lut_ops[ethosu_op.op.name](*new_args, **new_attrs)
+
+    def visit_call(self, call: tvm.relay.expr.Call) -> tvm.relay.expr.Call:
+        """Recursively visit call nodes in the input graph and if an ethosu.identity
+        operator with LUT is found and the preceding operator has a LUT attribute, create
+        a new NPU operator.
+        Parameters
+        ----------
+        call : tvm.relay.expr.Call
+            The current call node being visited.
+        Returns
+        -------
+        tvm.relay.expr.Call
+            The input call node in the case the current call node does
+            not refer to an Op. Else, a new call node with a new operator.
+        """
+        new_call = call
+        lut_activations = ["TANH", "LUT"]
+
+        if (
+            call.op.name == "contrib.ethosu.identity"
+            and call.attrs.activation in lut_activations
+            and isinstance(call.args[0], tvm.relay.expr.Call)
+        ):
+            producer_op = call.args[0]
+            # Check if the producer can do a LUT operation
+            if producer_op.op.name in self.lut_ops.keys():
+                # Check the producer doesn't already have a LUT
+                has_lut = producer_op.attrs.activation in lut_activations
+                if not has_lut:
+                    new_call = self.create_op_with_lut(call)
+
+        new_call = super().visit_call(new_call)
+
+        return new_call
+
+
+@relay.transform.function_pass(opt_level=1, name="LutOptimizer")
+class LUTsOptimizer(Pass):
+    """Register LutOptimizer as a relay pass."""
+
+    def transform_function(
+        self, func: tvm.relay.function.Function, mod: tvm.IRModule, _
+    ) -> tvm.IRModule:
+        """Visit relay nodes in the given module.
+        Parameters
+        ----------
+        func : tvm.relay.function.Function
+            The function to apply the layout optimization pass to.

Review comment:
       ```suggestion
               The function to apply the optimization pass for multiple LUTs to.
       ```

##########
File path: python/tvm/relay/backend/contrib/ethosu/codegen.py
##########
@@ -22,6 +22,109 @@
 from tvm.relay.backend.contrib.ethosu.legalize import LegalizeEthosU
 from tvm.relay.backend.contrib.ethosu import tir_to_cs_translator
 from tvm.relay.backend.contrib.ethosu import util
+from tvm.relay.expr_functor import ExprMutator
+from tvm.ir.transform import Pass
+
+# pylint: disable=unused-import
+from tvm.relay.backend.contrib.ethosu.op import op_attrs
+from tvm.relay.backend.contrib.ethosu import op
+
+
+class OptimizeLUTs(ExprMutator):
+    """A pass to merge an identity operator with a LUT based activation function with
+    a preceding operator provided that operator can do a table lookup for the activation
+    in the hardware"""
+
+    def __init__(self):
+        super().__init__()
+        self.lut_ops = {
+            "contrib.ethosu.conv2d": op.ethosu_conv2d,
+            "contrib.ethosu.depthwise_conv2d": op.ethosu_depthwise_conv2d,
+            "contrib.ethosu.pooling": op.ethosu_pooling,
+        }
+
+    def create_op_with_lut(self, call):
+        """Extract the parameters and attributes from the NPU operator and create
+        a new operator with LUT.
+        ----------
+        call : tvm.relay.expr.Call
+            The current call node being visited.
+        Returns
+        -------
+        tvm.relay.expr.Call
+            The new operator with LUT.
+        """
+        identity = call
+        ethosu_op = call.args[0]
+        lut = identity.args[1]
+        activation = identity.attrs.activation
+
+        new_attrs = dict(ethosu_op.attrs)
+        new_attrs["activation"] = activation
+
+        # Assume that LUT is always the last argument
+        new_args = [ethosu_op.args[n] for n in range(len(ethosu_op.args) - 1)]
+        new_args.append(lut)
+        assert ethosu_op.op.name in self.lut_ops.keys()
+
+        return self.lut_ops[ethosu_op.op.name](*new_args, **new_attrs)
+
+    def visit_call(self, call: tvm.relay.expr.Call) -> tvm.relay.expr.Call:
+        """Recursively visit call nodes in the input graph and if an ethosu.identity
+        operator with LUT is found and the preceding operator has a LUT attribute, create
+        a new NPU operator.
+        Parameters
+        ----------
+        call : tvm.relay.expr.Call
+            The current call node being visited.
+        Returns
+        -------
+        tvm.relay.expr.Call
+            The input call node in the case the current call node does
+            not refer to an Op. Else, a new call node with a new operator.
+        """
+        new_call = call
+        lut_activations = ["TANH", "LUT"]
+
+        if (
+            call.op.name == "contrib.ethosu.identity"
+            and call.attrs.activation in lut_activations
+            and isinstance(call.args[0], tvm.relay.expr.Call)
+        ):
+            producer_op = call.args[0]
+            # Check if the producer can do a LUT operation
+            if producer_op.op.name in self.lut_ops.keys():
+                # Check the producer doesn't already have a LUT
+                has_lut = producer_op.attrs.activation in lut_activations
+                if not has_lut:
+                    new_call = self.create_op_with_lut(call)
+
+        new_call = super().visit_call(new_call)
+
+        return new_call
+
+
+@relay.transform.function_pass(opt_level=1, name="LutOptimizer")
+class LUTsOptimizer(Pass):
+    """Register LutOptimizer as a relay pass."""
+
+    def transform_function(
+        self, func: tvm.relay.function.Function, mod: tvm.IRModule, _
+    ) -> tvm.IRModule:
+        """Visit relay nodes in the given module.
+        Parameters
+        ----------
+        func : tvm.relay.function.Function
+            The function to apply the layout optimization pass to.
+        mod : tvm.IRModule
+            The module to apply the layout optimization pass to.
+        Returns
+        -------
+        mod : tvm.IRModule
+            New module with augmented layouts.

Review comment:
       ```suggestion
               New module with optimized LUTs.
       ```

##########
File path: python/tvm/relay/backend/contrib/ethosu/codegen.py
##########
@@ -22,6 +22,109 @@
 from tvm.relay.backend.contrib.ethosu.legalize import LegalizeEthosU
 from tvm.relay.backend.contrib.ethosu import tir_to_cs_translator
 from tvm.relay.backend.contrib.ethosu import util
+from tvm.relay.expr_functor import ExprMutator
+from tvm.ir.transform import Pass
+
+# pylint: disable=unused-import
+from tvm.relay.backend.contrib.ethosu.op import op_attrs
+from tvm.relay.backend.contrib.ethosu import op
+
+
+class OptimizeLUTs(ExprMutator):
+    """A pass to merge an identity operator with a LUT based activation function with
+    a preceding operator provided that operator can do a table lookup for the activation
+    in the hardware"""
+
+    def __init__(self):
+        super().__init__()
+        self.lut_ops = {
+            "contrib.ethosu.conv2d": op.ethosu_conv2d,
+            "contrib.ethosu.depthwise_conv2d": op.ethosu_depthwise_conv2d,
+            "contrib.ethosu.pooling": op.ethosu_pooling,
+        }
+
+    def create_op_with_lut(self, call):
+        """Extract the parameters and attributes from the NPU operator and create
+        a new operator with LUT.
+        ----------
+        call : tvm.relay.expr.Call
+            The current call node being visited.
+        Returns
+        -------
+        tvm.relay.expr.Call
+            The new operator with LUT.
+        """
+        identity = call
+        ethosu_op = call.args[0]
+        lut = identity.args[1]
+        activation = identity.attrs.activation
+
+        new_attrs = dict(ethosu_op.attrs)
+        new_attrs["activation"] = activation
+
+        # Assume that LUT is always the last argument
+        new_args = [ethosu_op.args[n] for n in range(len(ethosu_op.args) - 1)]
+        new_args.append(lut)
+        assert ethosu_op.op.name in self.lut_ops.keys()
+
+        return self.lut_ops[ethosu_op.op.name](*new_args, **new_attrs)
+
+    def visit_call(self, call: tvm.relay.expr.Call) -> tvm.relay.expr.Call:
+        """Recursively visit call nodes in the input graph and if an ethosu.identity
+        operator with LUT is found and the preceding operator has a LUT attribute, create
+        a new NPU operator.
+        Parameters
+        ----------
+        call : tvm.relay.expr.Call
+            The current call node being visited.
+        Returns
+        -------
+        tvm.relay.expr.Call
+            The input call node in the case the current call node does
+            not refer to an Op. Else, a new call node with a new operator.
+        """
+        new_call = call
+        lut_activations = ["TANH", "LUT"]
+
+        if (
+            call.op.name == "contrib.ethosu.identity"

Review comment:
       Should there be a check here to make sure `call` is an `op`?
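
       One hypothetical form of that guard, placed at the top of `visit_call` on the assumption that `call.op` may be a `Function` or `GlobalVar` rather than an operator in a partitioned module:

       ```python
       # Hypothetical guard: only tvm.ir.Op nodes have a .name attribute, so
       # skip the rewrite for calls whose target is a function or global var.
       if not isinstance(call.op, tvm.ir.Op):
           return super().visit_call(call)
       ```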

##########
File path: python/tvm/relay/backend/contrib/ethosu/legalize.py
##########
@@ -123,6 +124,80 @@ def __call__(self, *args, **kwargs):
         pass
 
 
+def round_away_zero(f):
+    r = -0.5 if (f < 0) else 0.5
+    return np.trunc(f + r)
+
+
+def find_tanh_values(ifm_scale, ifm_zp, ofm_scale, ofm_zp):
+    """Method to calculate the values of the tanh lookup table"""
+    lut_values = list()
+    # Only int8 is currently supported
+    dtype = np.int8
+    qmin, qmax = np.iinfo(dtype).min, np.iinfo(dtype).max
+    for x in range(qmin, qmax + 1):
+        x_real = ifm_scale * (x - ifm_zp)
+        out_real = math.tanh(x_real)
+        lut_result = int(round_away_zero(ofm_zp + out_real / ofm_scale))
+        lut_result = min(qmax, max(qmin, lut_result))
+        lut_values.append(lut_result)
+
+    return lut_values
+
+
+class TanhRewriter(DFPatternCallback):
+    """This pass adds tanh as a LUT to the identity operator"""
+
+    def __init__(self):
+        super().__init__(require_type=True, rewrite_once=True)
+        self.pattern = (
+            wildcard().has_attr({"Composite": ethosu_patterns.TanhParams.composite_name})
+        )(wildcard())
+
+    def callback(self, pre, post, node_map):
+        id_input = post.args[0]
+
+        quantize_args = post.op.body.args
+        output_scale = float(quantize_args[1].data.asnumpy())
+        output_zp = int(quantize_args[2].data.asnumpy())
+
+        dequantize_args = quantize_args[0].args[0].args
+        input_scale = float(dequantize_args[1].data.asnumpy())
+        input_zp = int(dequantize_args[2].data.asnumpy())
+
+        lut_values = find_tanh_values(input_scale, input_zp, output_scale, output_zp)
+        lut = relay.const(lut_values, dtype="uint8")

Review comment:
       Just curious, can the LUT be any other dtype?
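
       As an aside on the quoted table construction, a standalone check of a single entry (with illustrative scale and zero-point values, not taken from a real model) behaves as follows:

       ```python
       import math
       import numpy as np

       def round_away_zero(f):
           r = -0.5 if (f < 0) else 0.5
           return np.trunc(f + r)

       # Illustrative int8 quantization parameters
       ifm_scale, ifm_zp = 0.02, 0
       ofm_scale, ofm_zp = 1.0 / 128, 0

       x = 64                                   # quantized input value
       x_real = ifm_scale * (x - ifm_zp)        # dequantize: 1.28
       out_real = math.tanh(x_real)             # ~0.8565
       lut_result = int(round_away_zero(ofm_zp + out_real / ofm_scale))
       lut_result = min(127, max(-128, lut_result))  # clamp to int8 range
       print(lut_result)                        # 110
       ```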

##########
File path: python/tvm/relay/backend/contrib/ethosu/legalize.py
##########
@@ -123,6 +124,80 @@ def __call__(self, *args, **kwargs):
         pass
 
 
+def round_away_zero(f):

Review comment:
       Could we reuse `round_away_zero` from `util.py`?
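
       If so, the reuse would be a one-line import, assuming the helper is defined in the backend's `util.py` as suggested:

       ```python
       from tvm.relay.backend.contrib.ethosu.util import round_away_zero
       ```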

##########
File path: python/tvm/relay/backend/contrib/ethosu/codegen.py
##########
@@ -22,6 +22,109 @@
 from tvm.relay.backend.contrib.ethosu.legalize import LegalizeEthosU
 from tvm.relay.backend.contrib.ethosu import tir_to_cs_translator
 from tvm.relay.backend.contrib.ethosu import util
+from tvm.relay.expr_functor import ExprMutator
+from tvm.ir.transform import Pass
+
+# pylint: disable=unused-import
+from tvm.relay.backend.contrib.ethosu.op import op_attrs
+from tvm.relay.backend.contrib.ethosu import op
+
+
+class OptimizeLUTs(ExprMutator):
+    """A pass to merge an identity operator with a LUT based activation function with
+    a preceding operator provided that operator can do a table lookup for the activation
+    in the hardware"""
+
+    def __init__(self):
+        super().__init__()
+        self.lut_ops = {
+            "contrib.ethosu.conv2d": op.ethosu_conv2d,
+            "contrib.ethosu.depthwise_conv2d": op.ethosu_depthwise_conv2d,
+            "contrib.ethosu.pooling": op.ethosu_pooling,
+        }
+
+    def create_op_with_lut(self, call):
+        """Extract the parameters and attributes from the NPU operator and create
+        a new operator with LUT.
+        ----------
+        call : tvm.relay.expr.Call
+            The current call node being visited.
+        Returns
+        -------
+        tvm.relay.expr.Call
+            The new operator with LUT.
+        """
+        identity = call
+        ethosu_op = call.args[0]
+        lut = identity.args[1]
+        activation = identity.attrs.activation
+
+        new_attrs = dict(ethosu_op.attrs)
+        new_attrs["activation"] = activation
+
+        # Assume that LUT is always the last argument
+        new_args = [ethosu_op.args[n] for n in range(len(ethosu_op.args) - 1)]
+        new_args.append(lut)

Review comment:
       ```suggestion
           new_args = ethosu_op.args[:-1] + [lut]
       ```
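
       A defensive variant of this suggestion, in case `Call.args` slicing semantics differ across TVM versions, materializes the args as a Python list first:

       ```python
       # Hypothetical variant: copy the args into a list, drop the trailing
       # LUT placeholder, then append the real LUT constant.
       new_args = list(ethosu_op.args)[:-1] + [lut]
       ```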

##########
File path: tests/python/contrib/test_ethosu/test_lookup_table.py
##########
@@ -0,0 +1,181 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# pylint: disable=invalid-name, unused-argument
+import pytest
+
+pytest.importorskip("ethosu.vela")
+import numpy as np
+import tflite.Model
+
+import tvm
+import tensorflow as tf
+from tvm import relay
+from tvm.relay.op.contrib.ethosu import partition_for_ethosu
+from tvm.relay.build_module import bind_params_by_name  # type: ignore
+
+from . import infra
+
+
+ACCEL_TYPES = ["ethos-u55-256", "ethos-u55-128", "ethos-u55-64", "ethos-u55-32"]
+
+
+@pytest.mark.parametrize("accel_type", ACCEL_TYPES)
+def test_tflite_lut_activations(accel_type):
+
+    dtype = "int8"
+    ifm_shape = (1, 55, 55, 3)
+
+    def create_tflite_graph():
+        tf.config.run_functions_eagerly(True)

Review comment:
       Do we need to enable eager execution?
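
       For comparison, a converter-based flow that avoids eager mode might look like this sketch (standard concrete-function conversion; the int8 quantization options used by the test are omitted here):

       ```python
       import tensorflow as tf

       ifm_shape = (1, 55, 55, 3)

       class TanhModel(tf.Module):
           @tf.function
           def tanh_func(self, x):
               return tf.math.tanh(x)

       model = TanhModel()
       concrete_func = model.tanh_func.get_concrete_function(
           tf.TensorSpec(ifm_shape, dtype=tf.float32)
       )
       converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
       tflite_model = converter.convert()
       ```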




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscribe@tvm.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org