Posted to commits@tvm.apache.org by GitBox <gi...@apache.org> on 2022/02/25 12:36:57 UTC

[GitHub] [tvm] manupa-arm commented on a change in pull request #10062: [microNPU][5] Convert Proposals to te.Schedules

manupa-arm commented on a change in pull request #10062:
URL: https://github.com/apache/tvm/pull/10062#discussion_r814732971



##########
File path: python/tvm/contrib/ethosu/cascader/scheduler.py
##########
@@ -0,0 +1,232 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Scheduler for cascader which converts Proposals into Schedules."""
+from typing import Tuple, List, Dict, DefaultDict
+from collections import defaultdict
+import numpy as np
+
+from tvm import te
+from tvm import tir
+from .cascader_options import CascaderOptions
+from .graph import CascaderGraph, Part, Tensor, TESubgraph
+from .tensor_config import MemoryRegion
+from .proposal import Proposal
+from .proposal_generator import generate_proposals
+from .graph import create_cascader_graph
+from .device_config import EthosuDeviceConfig
+
+
+def tile_nd(
+    sch: te.Schedule, tensor: te.Tensor, tile: Tuple[int, ...]
+) -> Tuple[List[tir.IterVar], List[tir.IterVar]]:
+    """Scheduling utility to perform N-dimensional tiling.
+
+    Parameters
+    ----------
+    sch : te.Schedule
+        The schedule to apply the tiling to.
+    tensor : te.Tensor
+        The tensor to apply the tiling to.
+    tile : Tuple[int, ...]
+        The N-dimensional tile size.
+
+    Returns
+    -------
+    outer_indices : List[tir.IterVar]
+        The outer iteration variables.
+    inner_indices : List[tir.IterVar]
+        The inner iteration variables.
+
+    """
+    outer_indices = []
+    inner_indices = []
+    for i, size in enumerate(tile):
+        outer, inner = sch[tensor].split(tensor.op.axis[i], size)
+        outer_indices.append(outer)
+        inner_indices.append(inner)
+
+    sch[tensor].reorder(*outer_indices, *inner_indices)
+    return outer_indices, inner_indices
+
+
+def stripe_part(
+    part: Part, stripe_shape: Tuple[int, ...], sch: te.Schedule
+) -> Tuple[te.Stage, tir.IterVar]:
+    """Apply a striping schedule to the TE subgraph represented by a Part."""
+    te_subgraph = part.subgraph
+    te_output_tensor = te_subgraph.output_tensor
+    outer_indices, _ = tile_nd(sch, te_output_tensor, stripe_shape)
+    g = sch.create_group(
+        outputs=te_output_tensor.op.input_tensors,
+        inputs=te_subgraph.input_tensors,
+        include_inputs=False,
+    )
+    g.compute_at(sch[te_output_tensor], outer_indices[-1])
+    for ax in outer_indices:
+        sch[te_output_tensor].unroll(ax)
+
+    return sch[te_output_tensor], outer_indices[-1]
+
+
+def cascade_part(
+    part: Part, stripe_stage: te.Stage, stripe_axis: tir.IterVar, sch: te.Schedule
+) -> None:
+    """Schedule a Part into a cascade indicated by a stripe Stage."""
+    te_subgraph = part.subgraph
+    g = sch.create_group(
+        outputs=te_subgraph.output_tensor, inputs=te_subgraph.input_tensors, include_inputs=False
+    )
+    g.compute_at(stripe_stage, stripe_axis)
+
+
+def update_readers(part: Part, readers: DefaultDict[te.Tensor, List[te.Tensor]]) -> None:
+    """Update a dictionary which stores the te.Tensors that need to be read in order to produce a given te.Tensor."""
+    visited = set()
+
+    def _visit(tensor):
+        if tensor is not visited and tensor not in part.subgraph.input_tensors:

Review comment:
       ```suggestion
               if tensor not in visited and tensor not in part.subgraph.input_tensors:
       ```
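
The suggestion above fixes a subtle bug: `tensor is not visited` is an identity comparison between the tensor and the `visited` set object itself, which is always true, so the guard never skips already-visited tensors. The membership test `not in` is what is intended. A standalone demonstration of the difference (plain `object()`s stand in for te.Tensors):

```python
# Identity vs. membership: why "tensor is not visited" never filters revisits.
visited = set()
x = object()  # stand-in for a te.Tensor

print(x is not visited)  # True: compares x's identity to the set object
print(x not in visited)  # True: x has not been added yet

visited.add(x)
print(x is not visited)  # still True, so the buggy guard re-enters _visit
print(x not in visited)  # False: the intended check now short-circuits
```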


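For readers following along, `tile_nd` in the diff splits every axis of a tensor by its tile size and hoists all outer loop variables ahead of all inner ones. A hedged usage sketch against the TE scheduling API (the workload and tile sizes here are illustrative, not from the PR):

```python
import tvm
from tvm import te

# Illustrative 2D elementwise workload to tile.
a = te.placeholder((64, 64), name="A")
b = te.compute((64, 64), lambda i, j: a[i, j] * 2, name="B")
sch = te.create_schedule(b.op)

# Equivalent of tile_nd(sch, b, (16, 8)): split each axis by its tile size,
# then reorder so all outer loops precede all inner loops.
outer, inner = [], []
for axis, size in zip(b.op.axis, (16, 8)):
    o, i = sch[b].split(axis, size)
    outer.append(o)
    inner.append(i)
sch[b].reorder(*outer, *inner)

print(tvm.lower(sch, [a, b], simple_mode=True))
```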


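Similarly, `stripe_part` and `cascade_part` rely on `Schedule.create_group` plus `compute_at` to pin a Part's intermediate stages to a stripe loop of its consumer. A minimal sketch of that pattern on a toy two-stage pipeline (again illustrative, assuming the same TE API as the diff):

```python
import tvm
from tvm import te

a = te.placeholder((64,), name="A")
b = te.compute((64,), lambda i: a[i] + 1, name="B")  # intermediate stage
c = te.compute((64,), lambda i: b[i] * 2, name="C")  # output stage
sch = te.create_schedule(c.op)

# Split the output into stripes of 16 (the 1D analogue of tile_nd).
outer, inner = sch[c].split(c.op.axis[0], 16)

# Group the stages between the inputs (exclusive) and the output's
# producers, then anchor the group at the stripe loop so B is computed
# stripe by stripe, as cascade_part does via g.compute_at(...).
g = sch.create_group(outputs=[b], inputs=[a], include_inputs=False)
g.compute_at(sch[c], outer)

# stripe_part additionally unrolls the outer stripe loops.
sch[c].unroll(outer)

print(tvm.lower(sch, [a, c], simple_mode=True))
```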
-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscribe@tvm.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org