Posted to commits@mxnet.apache.org by la...@apache.org on 2020/08/24 17:13:00 UTC

[incubator-mxnet] branch master updated: Fix LeakyRelu behaviour on empty input (#18934) (#18996)

This is an automated email from the ASF dual-hosted git repository.

lausen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git


The following commit(s) were added to refs/heads/master by this push:
     new 3c4ac19  Fix LeakyRelu behaviour on empty input (#18934) (#18996)
3c4ac19 is described below

commit 3c4ac19daa3e645d918692b864ea19640f7e0314
Author: bgawrych <ba...@intel.com>
AuthorDate: Mon Aug 24 19:12:12 2020 +0200

    Fix LeakyRelu behaviour on empty input (#18934) (#18996)
    
    * Fix LeakyRelu behaviour on empty input
    
    * Remove duplicated declarations
---
 src/operator/leaky_relu-inl.h                            |  2 ++
 src/operator/leaky_relu.cc                               |  2 ++
 src/operator/nn/mkldnn/mkldnn_act-inl.h                  |  7 -------
 src/operator/quantization/mkldnn/mkldnn_quantized_act.cc |  2 +-
 tests/python/unittest/test_smoke.py                      | 11 +++++++++++
 5 files changed, 16 insertions(+), 8 deletions(-)
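
The substance of the fix is a set of early-return guards: each LeakyReLU compute path now returns immediately when the input tensor has zero elements, so both the forward and backward passes become no-ops on empty input instead of misbehaving. Below is a minimal reproduction sketch distilled from the smoke test added in this commit; it assumes an MXNet build where the numpy-compatible interface (mx.np / mx.npx) is available, with npx.set_np() standing in for the test suite's @use_np decorator.

    # Minimal repro sketch for the empty-input case this commit fixes,
    # adapted from the new test_18934_empty_leaky_relu smoke test.
    from mxnet import np, npx, autograd

    npx.set_np()  # enable numpy semantics globally (the test uses @use_np)

    arr = np.random.rand(0, 2)        # zero-size input, shape (0, 2)
    arr_grad = np.empty_like(arr)     # gradient buffer of the same shape

    autograd.mark_variables([arr], [arr_grad])
    with autograd.record():
        res = npx.leaky_relu(arr)     # guarded: forward returns an empty output
    res.backward()                    # guarded: backward is a no-op too
    print(res.shape, arr_grad.shape)  # (0, 2) (0, 2)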

diff --git a/src/operator/leaky_relu-inl.h b/src/operator/leaky_relu-inl.h
index 945bd00..87755ec 100644
--- a/src/operator/leaky_relu-inl.h
+++ b/src/operator/leaky_relu-inl.h
@@ -353,6 +353,7 @@ void LeakyReLUCompute(const nnvm::NodeAttrs& attrs,
                       const OpContext& ctx, const std::vector<TBlob>& inputs,
                       const std::vector<OpReqType>& req,
                       const std::vector<TBlob>& outputs) {
+  if (inputs[0].Size() == 0U) return;
   const LeakyReLUParam &param = nnvm::get<LeakyReLUParam>(attrs.parsed);
   const std::vector<TBlob> no_use_but_adapt_origin_api;
   size_t expected = param.act_type == leakyrelu::kPReLU ? 2 : 1;
@@ -370,6 +371,7 @@ void LeakyReLUGradCompute(const nnvm::NodeAttrs& attrs,
                           const std::vector<TBlob>& inputs,
                           const std::vector<OpReqType>& req,
                           const std::vector<TBlob>& outputs) {
+  if (inputs[0].Size() == 0U) return;
   const LeakyReLUParam& param = nnvm::get<LeakyReLUParam>(attrs.parsed);
   const std::vector<TBlob> no_use_but_adapt_origin_api;
   // inputs: out_grad, input_data, input_gamma, output, output_mask
diff --git a/src/operator/leaky_relu.cc b/src/operator/leaky_relu.cc
index 8a1a075..6690834 100644
--- a/src/operator/leaky_relu.cc
+++ b/src/operator/leaky_relu.cc
@@ -90,6 +90,7 @@ static void LeakyReLUComputeExCPU(const nnvm::NodeAttrs& attrs,
                                   const std::vector<NDArray>& inputs,
                                   const std::vector<OpReqType>& req,
                                   const std::vector<NDArray>& outputs) {
+  if (inputs[0].shape().Size() == 0U) return;
   const LeakyReLUParam& param = nnvm::get<LeakyReLUParam>(attrs.parsed);
   size_t expected = param.act_type == leakyrelu::kPReLU ? 2 : 1;
   CHECK_EQ(inputs.size(), expected);
@@ -107,6 +108,7 @@ void LeakyReLUGradComputeExCPU(const nnvm::NodeAttrs& attrs,
                                const std::vector<NDArray>& inputs,
                                const std::vector<OpReqType>& req,
                                const std::vector<NDArray>& outputs) {
+  if (inputs[0].shape().Size() == 0U) return;
   const LeakyReLUParam& param = nnvm::get<LeakyReLUParam>(attrs.parsed);
   if (SupportMKLDNNLeakyRelu(param, inputs[0])) {
     std::vector<NDArray> in_data{inputs[0], inputs[1]};
diff --git a/src/operator/nn/mkldnn/mkldnn_act-inl.h b/src/operator/nn/mkldnn/mkldnn_act-inl.h
index cf3e4f4..70bf16a 100644
--- a/src/operator/nn/mkldnn/mkldnn_act-inl.h
+++ b/src/operator/nn/mkldnn/mkldnn_act-inl.h
@@ -74,13 +74,6 @@ MKLDNNActForward &GetActForward(const MKLDNNActParam& param,
                                 const OpContext &ctx, const NDArray &in_data,
                                 const mkldnn::memory &in_mem);
 
-void MKLDNNActivationForward(const nnvm::NodeAttrs& attrs, const OpContext &ctx,
-                             const NDArray &in_data, const OpReqType &req,
-                             const NDArray &out_data);
-void MKLDNNLeakyReluForward(const nnvm::NodeAttrs& attrs, const OpContext &ctx,
-                            const NDArray &in_data, const OpReqType &req,
-                            const NDArray &out_data);
-
 mkldnn::eltwise_backward::primitive_desc GetActBwdDescImpl(
     const MKLDNNActParam &param, const mkldnn::memory &input_mem,
     const mkldnn::memory &diff_dst_memory);
diff --git a/src/operator/quantization/mkldnn/mkldnn_quantized_act.cc b/src/operator/quantization/mkldnn/mkldnn_quantized_act.cc
index 86acac8..f7520d5 100644
--- a/src/operator/quantization/mkldnn/mkldnn_quantized_act.cc
+++ b/src/operator/quantization/mkldnn/mkldnn_quantized_act.cc
@@ -24,7 +24,7 @@
 */
 #if MXNET_USE_MKLDNN == 1
 
-#include "../../nn/mkldnn/mkldnn_act-inl.h"
+#include "../../nn/mkldnn/mkldnn_ops-inl.h"
 #include "../quantization_utils.h"
 
 namespace mxnet {
diff --git a/tests/python/unittest/test_smoke.py b/tests/python/unittest/test_smoke.py
index 26cd4e5..c14310c 100644
--- a/tests/python/unittest/test_smoke.py
+++ b/tests/python/unittest/test_smoke.py
@@ -56,3 +56,14 @@ def test_18933_channel_0():
     with autograd.record():
         a = npx.instance_norm(arr, gamma, beta)
     a.backward()
+
+@use_np
+@with_environment('MXNET_ENGINE_TYPE', 'NaiveEngine')
+def test_18934_empty_leaky_relu():
+    arr = np.random.rand(0,2)
+    arr_grad = np.empty_like(arr)
+
+    autograd.mark_variables([arr], [arr_grad])
+    with autograd.record():
+        res = npx.leaky_relu(arr)
+    res.backward()
\ No newline at end of file
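
The new smoke test pins MXNET_ENGINE_TYPE to NaiveEngine, which executes operators synchronously, so a failure on the empty-input path surfaces inside the test itself rather than being deferred by the asynchronous engine. Assuming the usual pytest-based setup of MXNet's unittest suite, the test can be run on its own with:

    pytest tests/python/unittest/test_smoke.py::test_18934_empty_leaky_relu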