Posted to commits@mxnet.apache.org by GitBox <gi...@apache.org> on 2022/03/29 14:35:59 UTC

[GitHub] [incubator-mxnet] bgawrych commented on a change in pull request #20981: [FEATURE] Add oneDNN support for npi: exp, square and sqrt

bgawrych commented on a change in pull request #20981:
URL: https://github.com/apache/incubator-mxnet/pull/20981#discussion_r837551810



##########
File path: src/operator/tensor/elemwise_unary_op.h
##########
@@ -457,30 +457,67 @@ class UnaryOp : public OpBase {
 };
 
 #if MXNET_USE_ONEDNN == 1
-inline bool TanhStorageType(const nnvm::NodeAttrs& attrs,
-                            const int dev_mask,
-                            DispatchMode* dispatch_mode,
-                            std::vector<int>* in_attrs,
-                            std::vector<int>* out_attrs) {
+inline bool EltwiseStorageType(const nnvm::NodeAttrs& attrs,
+                               const int dev_mask,
+                               DispatchMode* dispatch_mode,
+                               std::vector<int>* in_attrs,
+                               std::vector<int>* out_attrs) {
   CHECK_EQ(in_attrs->size(), 1);
   CHECK_EQ(out_attrs->size(), 1);
 
   return DNNLStorageType(attrs, dev_mask, true, dispatch_mode, in_attrs, out_attrs);
 }
 
-inline void TanhComputeExCPU(const nnvm::NodeAttrs& attrs,
-                             const OpContext& ctx,
-                             const std::vector<mxnet::NDArray>& inputs,
-                             const std::vector<OpReqType>& req,
-                             const std::vector<mxnet::NDArray>& outputs) {
-  if (SupportDNNLTanh(inputs[0], outputs[0])) {
+template <typename OP>
+struct DNNLAlgorithm {};
+template <>
+struct DNNLAlgorithm<op::mshadow_op::plus> {
+  static const dnnl::algorithm value = dnnl::algorithm::binary_add;
+};
+template <>
+struct DNNLAlgorithm<op::mshadow_op::minus> {
+  static const dnnl::algorithm value = dnnl::algorithm::binary_sub;
+};
+template <>
+struct DNNLAlgorithm<op::mshadow_op::mul> {
+  static const dnnl::algorithm value = dnnl::algorithm::binary_mul;
+};
+template <>
+struct DNNLAlgorithm<op::mshadow_op::div> {
+  static const dnnl::algorithm value = dnnl::algorithm::binary_div;
+};
+template <>
+struct DNNLAlgorithm<op::mshadow_op::tanh> {
+  static const dnnl::algorithm value = dnnl::algorithm::eltwise_tanh;
+};
+template <>
+struct DNNLAlgorithm<op::mshadow_op::exp> {
+  static const dnnl::algorithm value = dnnl::algorithm::eltwise_exp;
+};
+template <>
+struct DNNLAlgorithm<op::mshadow_op::square> {
+  static const dnnl::algorithm value = dnnl::algorithm::eltwise_square;
+};
+template <>
+struct DNNLAlgorithm<op::mshadow_op::square_root> {
+  static const dnnl::algorithm value = dnnl::algorithm::eltwise_sqrt;
+};

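(For context: the DNNLAlgorithm template above is a compile-time trait that maps an mshadow functor type to the matching dnnl::algorithm enumerator, so a single templated compute path can select the right oneDNN eltwise/binary primitive without per-operator branching. A minimal, self-contained illustration of the pattern follows; the Exp/Square tags and the Algo enum are stand-ins for the MXNet and oneDNN types, not code from this patch.)

    #include <iostream>

    // Stand-ins for the real mshadow functor tags and dnnl::algorithm enum.
    struct Exp {};
    struct Square {};
    enum class Algo { eltwise_exp, eltwise_square };

    // Primary template left empty: an unsupported functor fails to compile.
    template <typename OP>
    struct AlgoFor {};

    template <>
    struct AlgoFor<Exp> {
      static constexpr Algo value = Algo::eltwise_exp;
    };
    template <>
    struct AlgoFor<Square> {
      static constexpr Algo value = Algo::eltwise_square;
    };

    // One templated "compute" path can now pick the primitive kind.
    template <typename OP>
    void Compute() {
      std::cout << static_cast<int>(AlgoFor<OP>::value) << '\n';
    }

    int main() {
      Compute<Exp>();     // prints 0 (eltwise_exp)
      Compute<Square>();  // prints 1 (eltwise_square)
    }
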
Review comment:
       Can it be moved to some dnnl_eltwise-inl.h header?
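A minimal sketch of what such a header could look like, assuming a new file along the lines of src/operator/nn/dnnl/dnnl_eltwise-inl.h; the path, include guard, and includes below are illustrative, not part of this patch:

    // dnnl_eltwise-inl.h -- illustrative sketch only, not the actual patch.
    #ifndef MXNET_OPERATOR_NN_DNNL_DNNL_ELTWISE_INL_H_
    #define MXNET_OPERATOR_NN_DNNL_DNNL_ELTWISE_INL_H_

    #if MXNET_USE_ONEDNN == 1
    #include <dnnl.hpp>
    #include "../../mshadow_op.h"  // include path assumed, adjust to the real layout

    namespace mxnet {
    namespace op {

    // Compile-time map from an mshadow functor to the matching oneDNN algorithm.
    template <typename OP>
    struct DNNLAlgorithm {};

    template <>
    struct DNNLAlgorithm<mshadow_op::exp> {
      static const dnnl::algorithm value = dnnl::algorithm::eltwise_exp;
    };
    // ...remaining specializations (tanh, square, sqrt, binary ops) as in the diff...

    }  // namespace op
    }  // namespace mxnet

    #endif  // MXNET_USE_ONEDNN == 1
    #endif  // MXNET_OPERATOR_NN_DNNL_DNNL_ELTWISE_INL_H_
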




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscribe@mxnet.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org