Posted to commits@mxnet.apache.org by wk...@apache.org on 2020/01/10 15:46:19 UTC

[incubator-mxnet] branch master updated: Minor fix, use RAII for TensorRT builder and network object (#17189)

This is an automated email from the ASF dual-hosted git repository.

wkcn pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git


The following commit(s) were added to refs/heads/master by this push:
     new 651eb9d  Minor fix, use RAII for TensorRT builder and network object (#17189)
651eb9d is described below

commit 651eb9d5610464dac2603e64cd110d2bdf63bd8a
Author: taoli <li...@live.cn>
AuthorDate: Fri Jan 10 23:45:17 2020 +0800

    Minor fix, use RAII for TensorRT builder and network object (#17189)
---
 src/operator/subgraph/tensorrt/onnx_to_tensorrt.cc | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/src/operator/subgraph/tensorrt/onnx_to_tensorrt.cc b/src/operator/subgraph/tensorrt/onnx_to_tensorrt.cc
index 4e7ff66..b02d109 100644
--- a/src/operator/subgraph/tensorrt/onnx_to_tensorrt.cc
+++ b/src/operator/subgraph/tensorrt/onnx_to_tensorrt.cc
@@ -77,8 +77,8 @@ std::tuple<unique_ptr<nvinfer1::ICudaEngine>,
   GOOGLE_PROTOBUF_VERIFY_VERSION;
 
   auto trt_logger = std::unique_ptr<TRT_Logger>(new TRT_Logger(verbosity));
-  auto trt_builder = nvinfer1::createInferBuilder(*trt_logger);
-  auto trt_network = trt_builder->createNetwork();
+  auto trt_builder = InferObject(nvinfer1::createInferBuilder(*trt_logger));
+  auto trt_network = InferObject(trt_builder->createNetwork());
   auto trt_parser  = InferObject(nvonnxparser::createParser(*trt_network, *trt_logger));
   ::ONNX_NAMESPACE::ModelProto parsed_model;
   // We check for a valid parse, but the main effect is the side effect
@@ -125,8 +125,6 @@ std::tuple<unique_ptr<nvinfer1::ICudaEngine>,
   trt_builder->setMaxWorkspaceSize(max_workspace_size);
   trt_builder->setDebugSync(debug_builder);
   auto trt_engine = InferObject(trt_builder->buildCudaEngine(*trt_network));
-  trt_builder->destroy();
-  trt_network->destroy();
   return std::make_tuple(std::move(trt_engine), std::move(trt_parser), std::move(trt_logger));
 }
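
Note on the RAII pattern used above: the diff routes the raw pointers returned by createInferBuilder() and createNetwork() through the existing InferObject helper instead of calling destroy() by hand. The definition of InferObject is not part of this diff, so the following is only a minimal sketch of how such a wrapper is commonly written (names here are illustrative, not taken from the MXNet sources): a std::unique_ptr with a custom deleter that calls the TensorRT destroy() method.

#include <memory>

// Deleter that releases TensorRT objects via their destroy() method
// (TensorRT objects from this API vintage are not freed with delete).
struct TRTDestroyer {
  template <typename T>
  void operator()(T* obj) const {
    if (obj != nullptr) {
      obj->destroy();
    }
  }
};

template <typename T>
using TRTUniquePtr = std::unique_ptr<T, TRTDestroyer>;

// Helper in the spirit of InferObject: take ownership of a freshly
// created TensorRT object and wrap it so cleanup is automatic.
template <typename T>
TRTUniquePtr<T> MakeInferObject(T* raw) {
  return TRTUniquePtr<T>(raw);
}

With a wrapper like this, the explicit trt_builder->destroy() and trt_network->destroy() calls removed in the hunk above become unnecessary, and the builder and network are also released correctly on any early return or exception path.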