Posted to commits@mxnet.apache.org by GitBox <gi...@apache.org> on 2017/11/17 06:24:09 UTC

[GitHub] feiyuvl opened a new issue #8690: [bug] dropout backward with openmp

URL: https://github.com/apache/incubator-mxnet/issues/8690
 
 
   ```
   virtual void Backward(const OpContext &ctx,
                           const std::vector<TBlob> &out_grad,
                           const std::vector<TBlob> &in_data,
                           const std::vector<TBlob> &out_data,
                           const std::vector<OpReqType> &req,
                           const std::vector<TBlob> &in_grad,
                           const std::vector<TBlob> &aux_states) {
       using namespace mshadow;
       using namespace mshadow::expr;
       CHECK_EQ(out_grad.size(), 1U);
       CHECK_EQ(in_grad.size(), 1U);
       Stream<xpu> *s = ctx.get_stream<xpu>();
       Tensor<xpu, 2, DType> grad = out_grad[dropout::kOut].FlatTo2D<xpu, DType>(s);
       Tensor<xpu, 2, DType> mask = out_data[dropout::kMask].FlatTo2D<xpu, DType>(s);
       Tensor<xpu, 2, DType> gdata = in_grad[dropout::kData].FlatTo2D<xpu, DType>(s);
       if (ctx.is_train || mode_ == dropout::kAlways) {
   #if !defined(__CUDACC__) && defined(USE_MKL) && defined(_OPENMP)
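          // CPU-only fast path, compiled in only when MXNet is built with
          // MKL and OpenMP (and not when compiling for CUDA).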
         DType* ingradptr = gdata.dptr_;
         DType* outgradptr = grad.dptr_;
         auto maskptr = reinterpret_cast<int*>(mask.dptr_);
         int count = mask.shape_[0]*mask.shape_[1];
         const float pk_1 = 1.0f / pkeep_;
         #pragma omp parallel for num_threads(engine::OpenMP::Get()->GetRecommendedOMPThreadCount())
         for (int i = 0; i < count; ++i) {
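            // The line flagged by this report: the gradient is scaled by
            // 1/pkeep (pk_1) on top of the mask.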
           ingradptr[i] = outgradptr[i] * maskptr[i] * pk_1;
         }
   #else  // USE_MKL && _OPENMP
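          // Generic path: the gradient is multiplied by the stored mask
          // only, with no extra pk_1 factor.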
         CHECK_EQ(grad.shape_.Size(), mask.shape_.Size());
         Assign(gdata, req[dropout::kData], grad * mask);
   #endif  // USE_MKL && _OPENMP
       } else {
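          // Not training (and mode_ != kAlways): dropout is an identity
          // map, so the gradient passes through unchanged.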
         Assign(gdata, req[dropout::kData], F<mshadow_op::identity>(grad));
       }
     }
   ```
    The backward implementation of the dropout op in the MKL/OpenMP path is wrong: it should not multiply by pk_1. The generic branch below it (Assign(gdata, req[dropout::kData], grad * mask)) applies no such extra factor.
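
    To make the inconsistency concrete, here is a minimal standalone sketch of inverted dropout (not MXNet code; the names pkeep, pk_1, and mask merely mirror the snippet above). It shows that the backward scaling must match whatever convention the forward pass used: multiply by pk_1 in backward only if the stored mask is a raw 0/1 mask and the forward output was scaled by 1/pkeep.

    ```
    #include <cstdio>
    #include <random>
    #include <vector>

    int main() {
      const float pkeep = 0.5f;
      const float pk_1 = 1.0f / pkeep;
      std::mt19937 rng(42);
      std::bernoulli_distribution keep(pkeep);

      std::vector<float> x = {1.0f, 2.0f, 3.0f, 4.0f};
      std::vector<int> mask(x.size());
      std::vector<float> y(x.size()), dx(x.size());
      std::vector<float> dy = {1.0f, 1.0f, 1.0f, 1.0f};

      // Forward, convention A: raw 0/1 mask, output scaled by 1/pkeep.
      for (size_t i = 0; i < x.size(); ++i) {
        mask[i] = keep(rng) ? 1 : 0;
        y[i] = x[i] * mask[i] * pk_1;
      }

      // Backward under convention A: dy/dx = mask * pk_1, so the SAME
      // pk_1 factor is required here.
      for (size_t i = 0; i < x.size(); ++i) {
        dx[i] = dy[i] * mask[i] * pk_1;
      }

      // Under convention B, the forward pass folds 1/pkeep into the stored
      // mask (entries are 0 or pk_1); then backward must be dy * mask with
      // no extra pk_1, or the gradient is too large by a factor of 1/pkeep.
      for (size_t i = 0; i < x.size(); ++i) {
        std::printf("y[%zu] = %f, dx[%zu] = %f\n", i, y[i], i, dx[i]);
      }
      return 0;
    }
    ```

    Whether the OpenMP branch is actually wrong therefore hinges on which convention the corresponding forward pass uses: if the forward pass already bakes the 1/pkeep factor into the stored mask (as the generic branch appears to assume), the extra pk_1 multiplication double-scales the gradient.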

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services