Posted to commits@tvm.apache.org by ma...@apache.org on 2020/12/11 03:42:31 UTC

[tvm] branch main updated: Add softplus operator conversion to Onnx. (#7089)

This is an automated email from the ASF dual-hosted git repository.

masahi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tvm.git


The following commit(s) were added to refs/heads/main by this push:
     new ac25b4b  Add softplus operator conversion to Onnx. (#7089)
ac25b4b is described below

commit ac25b4b9052d2fd931979b2ec02aa95c57961c9f
Author: Josh Fromm <jw...@uw.edu>
AuthorDate: Thu Dec 10 19:42:18 2020 -0800

    Add softplus operator conversion to Onnx. (#7089)
---
 python/tvm/relay/frontend/onnx.py          | 12 ++++++++++++
 tests/python/frontend/onnx/test_forward.py | 29 +++++++++++++++++++++++++++++
 2 files changed, 41 insertions(+)
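
For reference, Softplus is defined as softplus(x) = log(1 + exp(x)), a smooth
approximation of ReLU, and the converter in the patch below implements exactly
this formula in Relay. A minimal NumPy sketch of the same computation (the
helper name softplus_ref is illustrative, not part of the patch):

    import numpy as np

    def softplus_ref(x):
        # Direct elementwise translation of the formula the converter
        # below emits: log(exp(x) + 1).
        return np.log(np.exp(x) + 1.0)

    # Matches the first test case added in this commit.
    print(softplus_ref(np.array([-1.0, 0.0, 1.0], dtype="float32")))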

diff --git a/python/tvm/relay/frontend/onnx.py b/python/tvm/relay/frontend/onnx.py
index 0b6ebdb..f0d7e2d 100644
--- a/python/tvm/relay/frontend/onnx.py
+++ b/python/tvm/relay/frontend/onnx.py
@@ -2095,6 +2095,17 @@ class Clip(OnnxOpConverter):
         return result
 
 
+class Softplus(OnnxOpConverter):
+    """Operator converter for Softplus."""
+
+    @classmethod
+    def _impl_v1(cls, inputs, attr, params):
+        data = inputs[0]
+        data_dtype = infer_type(data).checked_type.dtype
+        data = _op.exp(data) + _expr.const(1, dtype=data_dtype)
+        return _op.log(data)
+
+
 class Loop(OnnxOpConverter):
     """Operator converter for Loop"""
 
@@ -2371,6 +2382,7 @@ def _get_convert_map(opset):
         "Sum": Sum.get_converter(opset),
         "Mean": Mean.get_converter(opset),
         "Clip": Clip.get_converter(opset),
+        "Softplus": Softplus.get_converter(opset),
         # softmax default axis is different in onnx
         "Softmax": Softmax.get_converter(opset),
         "LogSoftmax": AttrCvt("log_softmax", {"axis": ("axis", 1)}),
diff --git a/tests/python/frontend/onnx/test_forward.py b/tests/python/frontend/onnx/test_forward.py
index 1e0b729..d7a07f7 100644
--- a/tests/python/frontend/onnx/test_forward.py
+++ b/tests/python/frontend/onnx/test_forward.py
@@ -3983,6 +3983,34 @@ def test_maxunpool():
     verify_maxunpool(xT, xI, [2, 2], strides=[2, 2], pads=pads)
 
 
+@tvm.testing.uses_gpu
+def test_softplus():
+    def verify_softplus(indata):
+        node = helper.make_node(
+            "Softplus",
+            inputs=["X"],
+            outputs=["Y"],
+        )
+
+        graph = helper.make_graph(
+            [node],
+            "softplus_test",
+            inputs=[helper.make_tensor_value_info("X", TensorProto.FLOAT, list(indata.shape))],
+            outputs=[helper.make_tensor_value_info("Y", TensorProto.FLOAT, list(indata.shape))],
+        )
+
+        model = helper.make_model(graph, producer_name="softplus_test")
+
+        verify_with_ort_with_inputs(model, [indata], dtype="float32", use_vm=True, opset=11)
+
+    # Simple case with all signs.
+    input_data = np.array([[-1, 0, 1]], dtype=np.float32)
+    verify_softplus(input_data)
+    # More fancy case.
+    input_data = np.random.randn(1, 32, 32, 3).astype("float32")
+    verify_softplus(input_data)
+
+
 if __name__ == "__main__":
     test_flatten()
     test_reshape()
@@ -4061,3 +4089,4 @@ if __name__ == "__main__":
     test_loop()
     test_size()
     test_maxunpool()
+    test_softplus()
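
One caveat worth noting about this formulation: log(exp(x) + 1) overflows in
float32 once x exceeds roughly 88, because exp(x) itself overflows, and the
tests above only exercise moderate inputs, so they would not catch it. A
numerically stable equivalent uses the identity log(1 + exp(x)) =
max(x, 0) + log1p(exp(-|x|)); a NumPy sketch for comparison (not part of the
patch):

    import numpy as np

    def softplus_stable(x):
        # Rewrites log(1 + exp(x)) so that exp() is never applied to a
        # large positive argument.
        return np.maximum(x, 0.0) + np.log1p(np.exp(-np.abs(x)))

    x = np.array([-100.0, 0.0, 100.0], dtype="float32")
    print(softplus_stable(x))       # [0., 0.6931472, 100.]
    print(np.log(np.exp(x) + 1.0))  # naive form overflows to inf at x = 100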