You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@mxnet.apache.org by mo...@apache.org on 2021/05/24 16:21:45 UTC

[incubator-mxnet] branch v1.x updated: [v1.x] ONNX export for large model (#20283)

This is an automated email from the ASF dual-hosted git repository.

moisesher pushed a commit to branch v1.x
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git


The following commit(s) were added to refs/heads/v1.x by this push:
     new b95e880  [v1.x] ONNX export for large model (#20283)
b95e880 is described below

commit b95e880016e4685486941ec6062628be95ffb7a3
Author: waytrue17 <52...@users.noreply.github.com>
AuthorDate: Mon May 24 09:20:07 2021 -0700

    [v1.x] ONNX export for large model (#20283)
    
    * support large model
    
    * set all_tensors_to_one_file to false
    
    * fix sanity
    
    * add test
    
    * import onnx
    
    * update doc
    
    * fix typo
    
    Co-authored-by: Wei Chu <we...@amazon.com>
---
 python/mxnet/onnx/README.md                             |  6 ++++++
 python/mxnet/onnx/mx2onnx/_export_model.py              | 17 ++++++++++-------
 python/mxnet/onnx/mx2onnx/_export_onnx.py               |  3 +--
 .../_op_translations/_op_translations_opset12.py        | 12 +++---------
 tests/python-pytest/onnx/test_onnxruntime_cv.py         | 12 +++++++++++-
 5 files changed, 31 insertions(+), 19 deletions(-)

diff --git a/python/mxnet/onnx/README.md b/python/mxnet/onnx/README.md
index 299dd5c..984253b 100644
--- a/python/mxnet/onnx/README.md
+++ b/python/mxnet/onnx/README.md
@@ -63,6 +63,9 @@ Parameters:
         This is the old name of in_types. We keep this parameter name for backward compatibility
     input_shape : List of tuple
         This is the old name of in_shapes. We keep this parameter name for backward compatibility
+    large_model : Boolean
+        Whether to export a model that is larger than 2 GB. If true, param tensors will be saved in
+        separate files along with the .onnx model file. This feature has been supported since onnx 1.8.0.
 
 Returns:
 
@@ -75,6 +78,9 @@ When the model has multiple inputs, all the input shapes and dtypes must be prov
 #### Dynamic Shape Input
 We can set `dynamic=True` to turn on support for dynamic input shapes. Note that even with dynamic shapes, a set of static input shapes still need to be specified in `in_shapes`; on top of that, we'll also need to specify which dimensions of the input shapes are dynamic in `dynamic_input_shapes`. We can simply set the dynamic dimensions as `None`, e.g. `(1, 3, None, None)`, or use strings in place of the `None`'s for better understandability in the exported onnx graph, e.g. `(1, 3, 'Heig [...]
 
+#### Export Large Model
+Users can set `large_model=True` to export models that are larger than 2 GB. In this case, all parameter tensors will be saved into separate files along with the .onnx model file.
+
 ```python
 # The batch dimension will be dynamic in this case
 in_shapes = [(1, 3, 224, 224)]
diff --git a/python/mxnet/onnx/mx2onnx/_export_model.py b/python/mxnet/onnx/mx2onnx/_export_model.py
index e0fc71c..fbfadde 100644
--- a/python/mxnet/onnx/mx2onnx/_export_model.py
+++ b/python/mxnet/onnx/mx2onnx/_export_model.py
@@ -51,7 +51,7 @@ def get_operator_support(opset_version=None):
 def export_model(sym, params, in_shapes=None, in_types=np.float32,
                  onnx_file_path='model.onnx', verbose=False, dynamic=False,
                  dynamic_input_shapes=None, run_shape_inference=False, input_type=None,
-                 input_shape=None):
+                 input_shape=None, large_model=False):
     """Exports the MXNet model file, passed as a parameter, into ONNX model.
     Accepts both symbol,parameter objects as well as json and params filepaths as input.
     Operator support and coverage -
@@ -83,6 +83,9 @@ def export_model(sym, params, in_shapes=None, in_types=np.float32,
         This is the old name of in_types. We keep this parameter name for backward compatibility
     input_shape : List of tuple
         This is the old name of in_shapes. We keep this parameter name for backward compatibility
+    large_model : Boolean
+        Whether to export a model that is larger than 2 GB. If true, param tensors will be saved in
+        separate files along with the .onnx model file. This feature has been supported since onnx 1.8.0.
 
     Returns
     -------
@@ -96,6 +99,7 @@ def export_model(sym, params, in_shapes=None, in_types=np.float32,
     """
 
     try:
+        import onnx
         from onnx import helper, mapping, shape_inference
         from onnx.defs import onnx_opset_version
     except ImportError:
@@ -150,11 +154,10 @@ def export_model(sym, params, in_shapes=None, in_types=np.float32,
         except: # pylint: disable=bare-except
             logging.info("Shape inference failed, original export is kept.")
 
-    # Save model on disk
-    with open(onnx_file_path, "wb") as file_handle:
-        serialized = onnx_model.SerializeToString()
-        file_handle.write(serialized)
-        logging.info("Input shape of the model %s ", in_shapes)
-        logging.info("Exported ONNX file %s saved to disk", onnx_file_path)
+    if large_model:
+        from onnx.external_data_helper import convert_model_to_external_data
+        convert_model_to_external_data(onnx_model, all_tensors_to_one_file=False, location=onnx_file_path+'.data')
 
+    onnx.save_model(onnx_model, onnx_file_path)
+    onnx.checker.check_model(onnx_file_path)
     return onnx_file_path
diff --git a/python/mxnet/onnx/mx2onnx/_export_onnx.py b/python/mxnet/onnx/mx2onnx/_export_onnx.py
index 3af870e..0556078 100644
--- a/python/mxnet/onnx/mx2onnx/_export_onnx.py
+++ b/python/mxnet/onnx/mx2onnx/_export_onnx.py
@@ -275,7 +275,7 @@ class MXNetGraph(object):
             ONNX graph
         """
         try:
-            from onnx import (checker, helper, NodeProto, ValueInfoProto, TensorProto)
+            from onnx import (helper, NodeProto, ValueInfoProto, TensorProto)
             from onnx.helper import make_tensor_value_info
             from onnx.defs import onnx_opset_version
         except ImportError:
@@ -442,5 +442,4 @@ class MXNetGraph(object):
 
         graph.initializer.extend(initializer)
 
-        checker.check_graph(graph)
         return graph
diff --git a/python/mxnet/onnx/mx2onnx/_op_translations/_op_translations_opset12.py b/python/mxnet/onnx/mx2onnx/_op_translations/_op_translations_opset12.py
index b73c5bf..deda791 100644
--- a/python/mxnet/onnx/mx2onnx/_op_translations/_op_translations_opset12.py
+++ b/python/mxnet/onnx/mx2onnx/_op_translations/_op_translations_opset12.py
@@ -223,15 +223,9 @@ def convert_weights_and_inputs(node, **kwargs):
 
         tensor_node = onnx.helper.make_tensor_value_info(name, data_type, dims)
 
-        initializer.append(
-            onnx.helper.make_tensor(
-                name=name,
-                data_type=data_type,
-                dims=dims,
-                vals=np_arr.flatten().tolist(),
-                raw=False
-            )
-        )
+        from onnx import numpy_helper
+        tensor = numpy_helper.from_array(np_arr, name=name)
+        initializer.append(tensor)
 
         return [tensor_node], (np_arr.dtype,)
     else:
diff --git a/tests/python-pytest/onnx/test_onnxruntime_cv.py b/tests/python-pytest/onnx/test_onnxruntime_cv.py
index f0c454e..c1fd5f0 100644
--- a/tests/python-pytest/onnx/test_onnxruntime_cv.py
+++ b/tests/python-pytest/onnx/test_onnxruntime_cv.py
@@ -61,6 +61,12 @@ class GluonModel():
                              dynamic_input_shapes=dynamic_input_shapes)
         return onnx_file
 
+    def export_onnx_large_model(self):
+        onnx_file = self.modelpath + ".onnx"
+        mx.onnx.export_model(self.modelpath + "-symbol.json", self.modelpath + "-0000.params",
+                             [self.input_shape], self.input_dtype, onnx_file, large_model=True)
+        return onnx_file
+
     def export_onnx_argaux(self):
         onnx_file = self.modelpath + ".onnx"
         sym_file = self.modelpath + "-symbol.json"
@@ -322,7 +328,11 @@ def test_obj_detection_model_inference_onnxruntime(tmp_path, model, obj_detectio
     try:
         tmp_path = str(tmp_path)
         M = GluonModel(model, (1,3,512,512), 'float32', tmp_path)
-        onnx_file = M.export_onnx()
+        if model in ['yolo3_darknet53_coco']:
+            # test for large_model feature
+            onnx_file = M.export_onnx_large_model()
+        else:
+            onnx_file = M.export_onnx()
         # create onnxruntime session using the generated onnx file
         ses_opt = onnxruntime.SessionOptions()
         ses_opt.log_severity_level = 3