Posted to commits@tvm.apache.org by GitBox <gi...@apache.org> on 2022/07/18 23:08:21 UTC

[GitHub] [tvm] sunggg commented on a diff in pull request #12087: [UMA] UMA v1.0

sunggg commented on code in PR #12087:
URL: https://github.com/apache/tvm/pull/12087#discussion_r923907669


##########
python/tvm/relay/backend/contrib/uma/_template/backend.py:
##########
@@ -0,0 +1,53 @@
+# Licensed to the Apache Software Foundation (ASF) under one

Review Comment:
   Is the `_template` directory meant for testing/example purposes? At first it sounded to me like a directory of templates that users can invoke or inherit to customize their accelerators.
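
   For reference, this is how I currently read the intended usage of the template (a sketch only; `register()` and the `my_ai_hw` target string are assumptions from skimming this PR):

   ```python
   import tvm
   from tvm import relay
   from tvm.relay.backend.contrib.uma._template.backend import MyAiHwBackend

   # Register the example backend once; I read _template as something users would
   # copy and adapt rather than inherit from directly.
   backend = MyAiHwBackend()
   backend.register()

   # A toy Relay module to compile against the freshly registered target.
   x = relay.var("x", shape=(1, 3, 224, 224), dtype="float32")
   w = relay.var("w", shape=(8, 3, 3, 3), dtype="float32")
   mod = tvm.IRModule.from_expr(relay.Function([x, w], relay.nn.conv2d(x, w)))

   target = tvm.target.Target("my_ai_hw", host=tvm.target.Target("c"))
   with tvm.transform.PassContext(opt_level=3):
       lib = relay.build(mod, target=target)
   ```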



##########
src/relay/backend/contrib/uma/tir_to_runtime.cc:
##########
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+#include <cmath>
+#include <fstream>
+#include <map>
+#include <sstream>
+#include <string>
+#include <unordered_set>
+#include <vector>
+
+#include "../../../../runtime/file_utils.h"
+#include "../../../../target/source/codegen_c.h"
+#include "../../../../target/source/codegen_c_host.h"
+
+namespace tvm {
+using namespace tir;
+namespace relay {
+namespace contrib {
+namespace uma {
+
+class UMACodegen : public codegen::CodeGenCHost {
+ public:
+  explicit UMACodegen(String target_str) : target_str_(target_str) {}
+
+  void Init(bool output_ssa, bool emit_asserts) {
+    auto includes_pf =
+        tvm::runtime::Registry::Get("relay.ext.uma.codegen_c_includes_" + target_str_);
+    ICHECK(includes_pf);
+    String includes = (*includes_pf)();
+    decl_stream << includes;
+    std::unordered_set<std::string> devices;
+    devices.insert(target_str_);
+    CodeGenCHost::Init(output_ssa, emit_asserts, target_str_, devices);
+  }
+
+  /*!
+   * \brief Emit the code that offloads a subgraph to the UMA target into the
+   * codegen's output stream.
+   */
+  void AddFunction(const PrimFunc& prim_func) { CodeGenC::AddFunction(prim_func); }
+
+ private:
+  String target_str_;
+
+  using codegen::CodeGenCHost::VisitStmt_;
+
+  /*! \brief Emits target-specific APIs for every call_extern */
+  void VisitExpr_(const CallNode* op, std::ostream& os) final {
+    if (!op->op.same_as(builtin::call_extern())) {
+      CodeGenCHost::VisitExpr_(op, os);
+      return;
+    }
+    auto replace_call_extern_pf =
+        tvm::runtime::Registry::Get("relay.ext.uma.codegen_c_replace_call_extern_" + target_str_);
+    if (replace_call_extern_pf == nullptr) {
+      CodeGenCHost::VisitExpr_(op, os);
+    } else {
+      // - the function type (void) still gets printed before the CallNode if the extern call is
+      //   wrapped in an EvaluateNode
+      // - VarNode arguments might have "wrong" name_hints; the correct variable name is determined
+      //   in C++ through GetVarID
+      String api_string = (*replace_call_extern_pf)(op->args);
+      os << api_string;
+    }
+    return;
+  }
+};
+
+runtime::Module TIRToRuntime(IRModule mod, Target target) {
+  bool output_ssa = false;
+  bool emit_asserts = false;
+  UMACodegen codegen(target->kind->name);
+  Array<String> function_names;
+  codegen.Init(output_ssa, emit_asserts);
+  for (auto kv : mod->functions) {
+    auto prim_func = Downcast<PrimFunc>(kv.second);
+    auto global_symbol = prim_func->GetAttr<String>(tvm::attr::kGlobalSymbol);
+    function_names.push_back(global_symbol.value());
+    codegen.AddFunction(prim_func);
+  }
+  std::string code = codegen.Finish();
+  return codegen::CSourceModuleCreate(code, "c", function_names);

Review Comment:
   In my current understanding, we may need to invoke a driver or DMA transfers for loosely-coupled accelerators (LCAs).
   How does the UMA codegen handle this?
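
   Related: since `VisitExpr_` defers to `relay.ext.uma.codegen_c_replace_call_extern_<target>`, I assume an LCA flow would register hooks roughly like the sketch below (the driver names are made up; only the registry names come from this file, and `my_ai_hw` is the target name assumed above):

   ```python
   import tvm

   @tvm.register_func("relay.ext.uma.codegen_c_includes_my_ai_hw")
   def gen_includes() -> str:
       # Headers that pull in the accelerator's driver / DMA API (hypothetical name).
       return '#include "my_ai_hw_driver.h"\n'

   @tvm.register_func("relay.ext.uma.codegen_c_replace_call_extern_my_ai_hw")
   def gen_replace_call_extern(args) -> str:
       # args are the call_extern arguments passed from the C++ visitor above;
       # emit a driver invocation that sets up DMA and blocks until completion.
       return "my_ai_hw_driver_run(/* buffers and sizes derived from args */)"
   ```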



##########
python/tvm/relay/backend/contrib/uma/_template/backend.py:
##########
@@ -0,0 +1,53 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""UMA backend for the my_ai_hw accelerator"""
+from .passes import MyAiHwConv2dPass
+from ..api.utils import PassPhase
+from ..backend import UMABackend
+from .codegen import gen_includes, gen_replace_call_extern
+from .patterns import conv2d_pattern
+
+
+class MyAiHwBackend(UMABackend):
+    """UMA backend for the MyAiHw accelerator."""
+
+    def __init__(self):
+        super().__init__()
+
+        #######################################################################
+        # Target configuration
+        #######################################################################
+        self._register_target_attr("dimension")
+
+        #######################################################################
+        # Relay to Relay function registration

Review Comment:
   Not sure if this comment is correct. Maybe "Relay Pattern registration"?
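
   For context, this is what I would expect under these headers (a sketch, not the PR's code; `_register_pattern` and the `target_name` property are my reading of the `UMABackend` API):

   ```python
   from tvm.relay.backend.contrib.uma.backend import UMABackend
   from tvm.relay.backend.contrib.uma._template.patterns import conv2d_pattern


   class SketchBackend(UMABackend):
       """Illustration only, not the PR's code."""

       def __init__(self):
           super().__init__()
           # "Relay pattern registration": declare which operator patterns this
           # accelerator claims during partitioning.
           self._register_pattern("conv2d", conv2d_pattern())

       @property
       def target_name(self):
           return "sketch_accelerator"
   ```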



##########
python/tvm/relay/backend/contrib/uma/api/partitioner.py:
##########
@@ -0,0 +1,118 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Partitioner base class of the Universal Modular Accelerator Interface (UMA)"""
+
+from typing import Callable, Dict, List, Tuple, Optional
+
+import tvm
+from tvm import relay
+from tvm.relay.build_module import bind_params_by_name
+from tvm.relay.op.contrib.register import register_pattern_table
+from .utils import PassPhase
+
+
+PatternTable = List[Tuple[str, tvm.relay.dataflow_pattern.DFPattern, Callable]]
+
+
+class UMAPartitioner:

Review Comment:
   How does this partitioner work with other partitioners? For example, can we use UMA with Collage?
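
   To make the question concrete: I assume the partitioner boils down to the usual BYOC sequence sketched below (pass names from `relay.transform`; the actual pipeline in this PR may differ), so composing with Collage would hinge on whether Collage can consume the registered pattern table instead of this fixed pipeline.

   ```python
   import tvm
   from tvm import relay


   def partition_for_uma_target(mod, target_name, pattern_table):
       """Merge registered patterns, annotate them for target_name, and split
       them out into separate functions (standard Relay BYOC flow)."""
       seq = tvm.transform.Sequential(
           [
               relay.transform.MergeComposite(pattern_table),
               relay.transform.AnnotateTarget(target_name),
               relay.transform.MergeCompilerRegions(),
               relay.transform.PartitionGraph(),
           ]
       )
       with tvm.transform.PassContext(opt_level=3):
           return seq(mod)
   ```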



##########
python/tvm/relay/backend/contrib/uma/api/utils.py:
##########
@@ -0,0 +1,52 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Utility methods for the Universal Modular Accelerator Interface (UMA)"""
+
+from enum import Enum, auto
+import uuid
+import tvm.tir
+from tvm.contrib import utils, clang
+
+
+class PassPhase(Enum):
+    """UMA pass phases."""
+
+    PRE_PARTITIONING = auto()
+    POST_PARTITIONING_0 = auto()

Review Comment:
   Could you elaborate on what each phase means?
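
   To show where my confusion is, this is how I currently guess the phases are consumed (names and semantics are assumptions, hence the question):

   ```python
   from collections import defaultdict
   from tvm.relay.backend.contrib.uma.api.utils import PassPhase

   # Backend passes, keyed by the phase at which they should run.
   registered_passes = defaultdict(list)


   def run_phase(mod, phase: PassPhase):
       """Apply every pass registered for `phase`, in registration order."""
       for registered_pass in registered_passes[phase]:
           mod = registered_pass(mod)
       return mod

   # My guess: PRE_PARTITIONING passes run on the full Relay module before the UMA
   # partitioner; POST_PARTITIONING_0 runs immediately after subgraphs are split out.
   ```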



##########
python/tvm/relay/backend/contrib/uma/api/codegen.py:
##########
@@ -0,0 +1,53 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Codegen base class of the Universal Modular Accelerator Interface (UMA)"""
+
+from typing import Callable
+import tvm
+
+
+class UMACodegen(object):
+    """
+    Codegen base class of the Universal Modular Accelerator Interface (UMA)
+    """
+
+    def __init__(self, target_name: str) -> None:
+        self.target_name = target_name
+
+    def _register_codegen(self, fmt: str = "c", **kwargs) -> None:
+        if fmt == "c":
+            self._register_c_codegen(**kwargs)
+        else:
+            raise RuntimeError(f'Unsupported codegen format "{fmt}"')
+
+    def _register_c_codegen(
+        self,
+        includes: Callable[[], str] = None,

Review Comment:
   Could you add a docstring for the parameters?
   Also, can `includes` be `Optional[str]`?
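
   Something along these lines is what I had in mind (a sketch; only the registry names come from `tir_to_runtime.cc`, the typing and wording are mine):

   ```python
   from typing import Callable, Optional


   def _register_c_codegen(
       self,
       includes: Optional[Callable[[], str]] = None,
       replace_call_extern: Optional[Callable[..., str]] = None,
   ) -> None:
       """Register C-codegen hooks for this UMA target.

       Parameters
       ----------
       includes : Optional[Callable[[], str]]
           Callback returning the include block emitted at the top of the
           generated C source (looked up by tir_to_runtime.cc as
           "relay.ext.uma.codegen_c_includes_<target>").
       replace_call_extern : Optional[Callable[..., str]]
           Callback that rewrites a call_extern into target-specific C code
           (looked up as "relay.ext.uma.codegen_c_replace_call_extern_<target>").
       """
   ```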



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscribe@tvm.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org