Posted to commits@tvm.apache.org by ke...@apache.org on 2021/01/29 00:25:35 UTC

[tvm] branch main updated: Remove MemoryPlan from VM passes (#7361)

This is an automated email from the ASF dual-hosted git repository.

kevinthesun pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tvm.git


The following commit(s) were added to refs/heads/main by this push:
     new ef032b3  Remove MemoryPlan from VM passes (#7361)
ef032b3 is described below

commit ef032b3b30cb05f4fbf30f0c9e20869904a1cdc6
Author: Matthew Brookhart <mb...@octoml.ai>
AuthorDate: Thu Jan 28 17:25:19 2021 -0700

    Remove MemoryPlan from VM passes (#7361)
---
 src/relay/backend/vm/compiler.cc    | 7 +++++--
 src/relay/backend/vm/lambda_lift.cc | 1 -
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/src/relay/backend/vm/compiler.cc b/src/relay/backend/vm/compiler.cc
index 8fbe31e..d908153 100644
--- a/src/relay/backend/vm/compiler.cc
+++ b/src/relay/backend/vm/compiler.cc
@@ -985,8 +985,11 @@ transform::Sequential MemoryOpt(tvm::Target host_target, TargetsMap targets) {
   // Fuse the shape functions.
   pass_seqs.push_back(transform::FuseOps());
 
-  // Perform memory planning in order to coalesce/reduce allocations.
-  pass_seqs.push_back(transform::MemoryPlan());
+  // TODO(mbrookhart, jroesch, masahi): this pass is very slow and does not
+  // yet provide complete memory reuse optimizations. Disable it until we can
+  // rewrite it in C++ and complete it.
+  // // Perform memory planning in order to coalesce/reduce allocations.
+  // pass_seqs.push_back(transform::MemoryPlan());
 
   // Compute away constant computation introduced by coalescing allocations.
   pass_seqs.push_back(transform::FoldConstant());
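
For context, here is a minimal sketch of how a pass pipeline like MemoryOpt is put together: passes are collected into an Array and wrapped in a transform::Sequential, which applies them in order to an IRModule. The function name SimplifiedMemoryOpt and the exact set of includes are illustrative assumptions, not the real compiler.cc code.

    // Minimal sketch, assuming TVM's C++ pass infrastructure and the Relay
    // passes referenced in the hunk above. SimplifiedMemoryOpt is a made-up
    // name used only for illustration.
    #include <tvm/ir/transform.h>
    #include <tvm/relay/transform.h>

    using tvm::transform::Pass;
    using tvm::transform::Sequential;
    namespace rt = tvm::relay::transform;

    Sequential SimplifiedMemoryOpt() {
      tvm::Array<Pass> pass_seqs;
      // Fuse operators, as in the hunk above.
      pass_seqs.push_back(rt::FuseOps());
      // transform::MemoryPlan() was pushed here before this commit; it is
      // now skipped pending a faster, more complete C++ rewrite.
      // Fold away constant computation left behind by earlier rewrites.
      pass_seqs.push_back(rt::FoldConstant());
      // Sequential runs each pass in order when invoked on an IRModule.
      return Sequential(pass_seqs);
    }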
diff --git a/src/relay/backend/vm/lambda_lift.cc b/src/relay/backend/vm/lambda_lift.cc
index 8e9cc62..fe9a544 100644
--- a/src/relay/backend/vm/lambda_lift.cc
+++ b/src/relay/backend/vm/lambda_lift.cc
@@ -192,7 +192,6 @@ class LambdaLifter : public ExprMutator {
       global = module_->GetGlobalVar(name);
     } else {
       // Add the lifted function to the module.
-      std::cout << AsText(lifted_func) << std::endl;
       module_->Add(global, lifted_func);
     }
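
If this dump is ever needed again, a debug-level log is the more idiomatic route than an unconditional std::cout. A minimal sketch, assuming the dmlc logging macros and the AsText printer that TVM already uses, to be placed where the removed line stood:

    // Sketch only: DLOG comes from dmlc/logging.h and is compiled out
    // unless debug logging is enabled, so the textual dump of the lifted
    // function does not pollute normal runs.
    DLOG(INFO) << "lifted function:\n"
               << AsText(lifted_func, /*show_meta_data=*/false);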