Posted to commits@mxnet.apache.org by pa...@apache.org on 2019/10/19 07:12:41 UTC

[incubator-mxnet] branch mkldnn-v1.0 updated: [mkldnn-v1.0]Minor fix for leakyrelu compile flag (#16519)

This is an automated email from the ASF dual-hosted git repository.

patriczhao pushed a commit to branch mkldnn-v1.0
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git


The following commit(s) were added to refs/heads/mkldnn-v1.0 by this push:
     new 6eadab3  [mkldnn-v1.0]Minor fix for leakyrelu compile flag (#16519)
6eadab3 is described below

commit 6eadab39d0fcabcfe13013d636fcf22975bb8f99
Author: Xinyu Chen <xi...@intel.com>
AuthorDate: Sat Oct 19 15:12:10 2019 +0800

    [mkldnn-v1.0]Minor fix for leakyrelu compile flag (#16519)
    
    * change to MXNET_USE_MKLDNN == 100
    
    * trigger
---
 src/operator/leaky_relu.cc | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/src/operator/leaky_relu.cc b/src/operator/leaky_relu.cc
index 4d1c5ca1..1a1238c 100644
--- a/src/operator/leaky_relu.cc
+++ b/src/operator/leaky_relu.cc
@@ -25,10 +25,10 @@
 */
 
 #include "./leaky_relu-inl.h"
-#if MXNET_USE_MKLDNN == 1
+#if MXNET_USE_MKLDNN == 100
 #include "./nn/mkldnn/mkldnn_base-inl.h"
 #include "./nn/mkldnn/mkldnn_ops-inl.h"
-#endif  // MXNET_USE_MKLDNN == 1
+#endif  // MXNET_USE_MKLDNN == 100
 
 #include <nnvm/op_attr_types.h>
 namespace mxnet {
@@ -84,7 +84,7 @@ static bool LeakyReLUShape(const nnvm::NodeAttrs& attrs,
   return true;
 }
 
-#if MXNET_USE_MKLDNN == 1
+#if MXNET_USE_MKLDNN == 100
 static void LeakyReLUComputeExCPU(const nnvm::NodeAttrs& attrs,
                                   const OpContext& ctx,
                                   const std::vector<NDArray>& inputs,
@@ -139,7 +139,7 @@ inline static bool BackwardLeakyReLUStorageType(const nnvm::NodeAttrs& attrs,
   return MKLDNNStorageType(attrs, dev_mask, SupportMKLDNNLeakyRelu(param),
                            dispatch_mode, in_attrs, out_attrs);
 }
-#endif  // MXNET_USE_MKLDNN == 1
+#endif  // MXNET_USE_MKLDNN == 100
 
 NNVM_REGISTER_OP(LeakyReLU)
 .describe(R"code(Applies Leaky rectified linear unit activation element-wise to the input.
@@ -169,7 +169,7 @@ The following modified ReLU Activation functions are supported:
   return param.act_type == leakyrelu::kRReLU ? 2 : 1;
 })
 .set_attr_parser(ParamParser<LeakyReLUParam>)
-#if MXNET_USE_MKLDNN == 1
+#if MXNET_USE_MKLDNN == 100
 .set_attr<FInferStorageType>("FInferStorageType", LeakyReLUStorageType)
 #endif
 .set_attr<nnvm::FListInputNames>("FListInputNames",
@@ -187,7 +187,7 @@ The following modified ReLU Activation functions are supported:
 .set_attr<mxnet::FInferShape>("FInferShape", LeakyReLUShape)
 .set_attr<nnvm::FInferType>("FInferType", LeakyReLUType)
 .set_attr<FCompute>("FCompute<cpu>", LeakyReLUCompute<cpu>)
-#if MXNET_USE_MKLDNN == 1
+#if MXNET_USE_MKLDNN == 100
 .set_attr<bool>("TIsMKLDNN", true)
 .set_attr<FComputeEx>("FComputeEx<cpu>", LeakyReLUComputeExCPU)
 #endif
@@ -211,7 +211,7 @@ NNVM_REGISTER_OP(_backward_LeakyReLU)
   return param.act_type == leakyrelu::kPReLU ? 2 : 1;
 })
 .set_attr<nnvm::TIsBackward>("TIsBackward", true)
-#if MXNET_USE_MKLDNN == 1
+#if MXNET_USE_MKLDNN == 100
 .set_attr<FInferStorageType>("FInferStorageType", BackwardLeakyReLUStorageType)
 #endif
 .set_attr<nnvm::FInplaceOption>("FInplaceOption", [](const NodeAttrs& attrs){
@@ -221,7 +221,7 @@ NNVM_REGISTER_OP(_backward_LeakyReLU)
   return std::vector<ResourceRequest>{ResourceRequest::kTempSpace};
 })
 .set_attr_parser(ParamParser<LeakyReLUParam>)
-#if MXNET_USE_MKLDNN == 1
+#if MXNET_USE_MKLDNN == 100
 .set_attr<bool>("TIsMKLDNN", true)
 .set_attr<FComputeEx>("FComputeEx<cpu>", LeakyReLUGradComputeExCPU)
 #endif
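
For context, a minimal, self-contained sketch of the versioned compile-flag pattern this patch relies on: on the mkldnn-v1.0 branch the build defines MXNET_USE_MKLDNN as 100 rather than 1, so preprocessor guards must compare against 100 for the MKL-DNN v1.0 code paths to be compiled in. The fallback value and the mkldnn_backend() helper below are illustrative assumptions for demonstration only, not code from the MXNet repository.

    // Illustrative sketch (not MXNet source): a versioned compile flag selects
    // which backend path is built. Compile with -DMXNET_USE_MKLDNN=100 to mimic
    // the mkldnn-v1.0 branch, or -DMXNET_USE_MKLDNN=1 for the legacy integration.
    #include <iostream>

    #ifndef MXNET_USE_MKLDNN
    #define MXNET_USE_MKLDNN 0   // assumption: 0 means MKL-DNN is disabled
    #endif

    #if MXNET_USE_MKLDNN == 100
    // Compiled only for the MKL-DNN v1.0 integration.
    const char* mkldnn_backend() { return "MKL-DNN v1.0 path"; }
    #elif MXNET_USE_MKLDNN == 1
    // Compiled only for the legacy MKL-DNN v0.x integration.
    const char* mkldnn_backend() { return "legacy MKL-DNN v0.x path"; }
    #else
    // Fallback when MKL-DNN is not enabled at all.
    const char* mkldnn_backend() { return "plain CPU path"; }
    #endif

    int main() {
      std::cout << "LeakyReLU would dispatch to: " << mkldnn_backend() << '\n';
      return 0;
    }

With guards written as "== 1", the v1.0 branch (where the flag is 100) would silently skip the MKL-DNN registrations; updating every guard in leaky_relu.cc to "== 100", as the diff above does, keeps the FComputeEx and storage-type attributes compiled in.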