You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@mxnet.apache.org by GitBox <gi...@apache.org> on 2018/01/12 00:52:45 UTC

[GitHub] larroy commented on a change in pull request #9366: Refactor dropout operator to use ParallelRandom generator and also react deterministically when seeding

larroy commented on a change in pull request #9366: Refactor dropout operator to use ParallelRandom generator and also react deterministically when seeding
URL: https://github.com/apache/incubator-mxnet/pull/9366#discussion_r161116852
 
 

 ##########
 File path: src/operator/nn/dropout-inl.h
 ##########
 @@ -95,46 +73,75 @@ struct DropoutParam : public dmlc::Parameter<DropoutParam> {
 template<typename xpu, typename DType>
 class DropoutOp : public Operator {
  public:
+  /*!
+   * \brief Dropout kernel, compute dropout tensor
+   */
+  struct DropoutKernel {
+    /*!
+     * \brief Dropout kernel function
 +     * \param id 0-based thread index
+     * \param gen Random number generator
+     * \param N Total number of items in the output
+     * \param step Step between items, related to parallelism
+     * \param dropout_out Output dropout values
+     * \param mask_out  Output mask (is multiplied to create dropout output, may be 0)
+     * \param input_data Input data to perform the dropout on
 +     * \param pkeep Probability of keeping an element (the element is kept when the generated random number is less than this value)
+     */
+    MSHADOW_XINLINE static void Map(int id,
+                                    RandGenerator<xpu, DType> gen,
 
 Review comment:
   should gen be by ref?

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services