Posted to commits@mxnet.apache.org by ak...@apache.org on 2021/05/13 14:18:03 UTC

[incubator-mxnet] branch master updated: Fusing gelu post operator in Fully Connected symbol (#20228)

This is an automated email from the ASF dual-hosted git repository.

akarbown pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git


The following commit(s) were added to refs/heads/master by this push:
     new b209907  Fusing gelu post operator in Fully Connected symbol (#20228)
b209907 is described below

commit b2099074ca6bde258a4eb2456fdc1a08b0453735
Author: AdamGrabowski <ad...@intel.com>
AuthorDate: Thu May 13 16:16:01 2021 +0200

    Fusing gelu post operator in Fully Connected symbol (#20228)
---
 src/operator/subgraph/mkldnn/mkldnn_fc-inl.h      |  3 ++-
 src/operator/subgraph/mkldnn/mkldnn_fc.cc         | 16 ++++++++++++++--
 src/operator/subgraph/mkldnn/mkldnn_fc_property.h | 10 ++++++++++
 tests/python/mkl/subgraphs/test_fc_subgraph.py    |  4 +++-
 4 files changed, 29 insertions(+), 4 deletions(-)
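
In short, the patch teaches the MKL-DNN fully-connected subgraph pass to fuse
LeakyReLU-family post-ops ('leaky', 'elu', and notably 'gelu') into the FC
primitive, in both the float and the quantized flows. As a rough sketch of the
pattern the pass now matches, modeled on the test added at the bottom of this
diff (the block name, sizes, and shapes here are illustrative only):

    import mxnet as mx
    from mxnet.gluon import nn

    class FCGelu(nn.HybridBlock):
        """Dense layer followed by GELU: the pattern this commit fuses."""
        def __init__(self, **kwargs):
            super(FCGelu, self).__init__(**kwargs)
            self.fc = nn.Dense(64, use_bias=True, flatten=True)

        def hybrid_forward(self, F, x):
            fc_out = self.fc(x)
            # GELU is exposed through LeakyReLU's act_type, which is why
            # the pass matches LeakyReLU nodes rather than Activation
            return F.LeakyReLU(fc_out, act_type='gelu')

    net = FCGelu()
    net.initialize()
    net.hybridize(static_alloc=True, static_shape=True)
    out = net(mx.nd.random.uniform(shape=(8, 32)))

With an MKL-DNN build, hybridizing is typically enough for the subgraph pass to
run; the fused symbol then executes the FC and the GELU as a single oneDNN
primitive.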

diff --git a/src/operator/subgraph/mkldnn/mkldnn_fc-inl.h b/src/operator/subgraph/mkldnn/mkldnn_fc-inl.h
index 14177c4..de8a256 100644
--- a/src/operator/subgraph/mkldnn/mkldnn_fc-inl.h
+++ b/src/operator/subgraph/mkldnn/mkldnn_fc-inl.h
@@ -36,7 +36,8 @@ static inline bool SupportMKLDNNFCEltwiseFusion(const std::string op_name) {
       op_name == "sqrt" ||
       op_name == "exp" ||
       op_name == "abs" ||
-      op_name == "clip") {
+      op_name == "clip" ||
+      op_name == "LeakyReLU") {
     return true;
   } else {
     return false;
diff --git a/src/operator/subgraph/mkldnn/mkldnn_fc.cc b/src/operator/subgraph/mkldnn/mkldnn_fc.cc
index 8f18b67..8eba465 100644
--- a/src/operator/subgraph/mkldnn/mkldnn_fc.cc
+++ b/src/operator/subgraph/mkldnn/mkldnn_fc.cc
@@ -286,8 +286,16 @@ void SgMKLDNNFCOp::Forward(const OpContext &ctx,
       if (fuse_requantize || mkldnn_param.enable_float_output) {
         float tmp_scale_ = 1.0f;
         if (fuse_requantize) {
-          tmp_scale_ =
-            GetQuantizeScale(output.dtype(), cached_min_output_, cached_max_output_) / data_scale_;
+          if (mkldnn_param.with_eltwise) {
+            tmp_scale_ = 1.0 / data_scale_;
+            full_param_.eltwise_param.scale =
+              GetQuantizeScale(output.dtype(), cached_min_output_, cached_max_output_);
+          } else {
+            tmp_scale_ =
+              GetQuantizeScale(output.dtype(),
+                               cached_min_output_,
+                               cached_max_output_) / data_scale_;
+          }
         } else {
           tmp_scale_ = 1.0 / data_scale_;
         }
@@ -405,6 +413,10 @@ static void SgMKLDNNFCParamParser(nnvm::NodeAttrs *attrs) {
       if (op_name == "Activation") {
         const ActivationParam act_param = nnvm::get<ActivationParam>(node->attrs.parsed);
         full_param.eltwise_param.alg = GetMKLDNNActAlgo(act_param);
+      } else if (op_name == "LeakyReLU") {
+        const auto act_param = nnvm::get<LeakyReLUParam>(node->attrs.parsed);
+        full_param.eltwise_param.alpha = act_param.slope;
+        full_param.eltwise_param.alg = GetMKLDNNActAlgo(act_param);
       } else if (op_name == "clip") {
         const ClipParam clip_param = nnvm::get<ClipParam>(node->attrs.parsed);
         full_param.eltwise_param.alg = mkldnn::algorithm::eltwise_bounded_relu;
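
The Forward-path hunk above splits the requantization scale once an eltwise
post-op is fused: the FC primitive now only undoes the input scale, while the
output quantization scale travels with the eltwise post-op so that it is
applied after the activation rather than before it. A plain-Python sketch of
that arithmetic (the names are illustrative, not MXNet internals):

    def requantize_scales(out_scale, data_scale, with_eltwise):
        """Mirror of the branch added to SgMKLDNNFCOp::Forward."""
        if with_eltwise:
            fc_scale = 1.0 / data_scale        # FC only removes the input scale
            eltwise_scale = out_scale          # output scale applied post-activation
        else:
            fc_scale = out_scale / data_scale  # single fused scale, as before
            eltwise_scale = 1.0
        return fc_scale, eltwise_scale
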
diff --git a/src/operator/subgraph/mkldnn/mkldnn_fc_property.h b/src/operator/subgraph/mkldnn/mkldnn_fc_property.h
index 2fbfb85..9a0c777 100644
--- a/src/operator/subgraph/mkldnn/mkldnn_fc_property.h
+++ b/src/operator/subgraph/mkldnn/mkldnn_fc_property.h
@@ -102,6 +102,16 @@ class SgMKLDNNFCSelector : public SubgraphSelector {
             return true;
           }
         }
+        if (new_node.op() == Op::Get("LeakyReLU")) {
+          const LeakyReLUParam &param = nnvm::get<LeakyReLUParam>(new_node.attrs.parsed);
+          if (param.act_type == leakyrelu::kLeakyReLU ||
+              param.act_type == leakyrelu::kELU ||
+              param.act_type == leakyrelu::kGELU) {
+            matched_list_.push_back(&new_node);
+            status_ = kSuccess;
+            return true;
+          }
+        }
         if (!quantized_ && (new_node.op() == Op::Get("square") ||
             new_node.op() == Op::Get("sqrt") ||
             new_node.op() == Op::Get("exp"))) {
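
Note that the selector branch above only accepts the three LeakyReLU variants
that map onto oneDNN eltwise algorithms; the remaining act_type values keep
falling through unfused. Roughly (assuming the standard LeakyReLU act_type
set):

    # act_type values matched by the new selector branch
    fused = ['leaky', 'elu', 'gelu']
    # act_type values that still run as standalone ops (assumption: no
    # other fusion pass picks these up)
    unfused = ['prelu', 'rrelu', 'selu']
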
diff --git a/tests/python/mkl/subgraphs/test_fc_subgraph.py b/tests/python/mkl/subgraphs/test_fc_subgraph.py
index 398d686..07151ad 100644
--- a/tests/python/mkl/subgraphs/test_fc_subgraph.py
+++ b/tests/python/mkl/subgraphs/test_fc_subgraph.py
@@ -23,7 +23,7 @@ from mxnet.contrib import quantization
 from mxnet.gluon import nn
 from mxnet.test_utils import assert_almost_equal_with_err
 
-fc_post_ops_list=['relu', 'sigmoid', 'tanh', 'softrelu',
+fc_post_ops_list=['relu', 'sigmoid', 'tanh', 'softrelu', 'gelu', 'elu', 'leaky',
                   'square', 'square_root', 'abs', 'exp', 'bounded_relu']
 
 def test_float64_fallback():
@@ -71,6 +71,8 @@ def test_fc_eltwise(data_shape, use_bias, flatten, alg):
       fc_out = self.fc(x)
       if self.alg in ['relu', 'sigmoid', 'tanh', 'softrelu']:
         out = F.Activation(fc_out, act_type=self.alg)
+      elif self.alg in ['gelu', 'elu', 'leaky']:
+        out = F.LeakyReLU(fc_out, act_type=self.alg)
       elif self.alg == 'square':
         out = F.square(fc_out)
       elif self.alg == 'square_root':
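
To exercise the extended eltwise cases from a checkout, something along these
lines should work (the environment variable and test selector are assumptions
based on the paths in the diffstat above, not part of this commit):

    import os
    import pytest

    # The MKL-DNN subgraph backend is usually on by default in MKL-DNN
    # builds; setting it explicitly is a harmless belt-and-braces step
    os.environ["MXNET_SUBGRAPH_BACKEND"] = "MKLDNN"
    pytest.main(["tests/python/mkl/subgraphs/test_fc_subgraph.py",
                 "-k", "test_fc_eltwise"])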