Posted to commits@mxnet.apache.org by GitBox <gi...@apache.org> on 2019/02/08 21:22:43 UTC

[GitHub] szha commented on a change in pull request #14098: softmax for fp16 with fp32 accumulator

szha commented on a change in pull request #14098: softmax for fp16 with fp32 accumulator
URL: https://github.com/apache/incubator-mxnet/pull/14098#discussion_r255239613
 
 

 ##########
 File path: src/operator/nn/softmax-inl.h
 ##########
 @@ -36,22 +36,22 @@ namespace op {
 namespace mxnet_op {
 
 struct softmax_fwd {
-  template<typename DType>
-  MSHADOW_XINLINE static DType Map(DType a, DType b) {
+  template<typename DType, typename AType>
+  MSHADOW_XINLINE static DType Map(DType a, AType b) {
     return DType(expf(a)/b);
   }
 };
 
 
 struct log_softmax_fwd {
-  template<typename DType>
-  MSHADOW_XINLINE static DType Map(DType a, DType b) {
+  template<typename DType, typename AType>
+  MSHADOW_XINLINE static DType Map(DType a, AType b) {
     return DType(a - logf(b));
 
 Review comment:
   Thanks. Good point. The same should apply to the gradient kernels.
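   For context, carrying the change over to the backward kernels would mean
   giving them the same split between the data type and a wider accumulator
   type. The following is a hypothetical sketch, not this PR's actual diff:
   it assumes the usual softmax/log-softmax gradient formulas, the same
   surrounding softmax-inl.h context as above (MSHADOW_XINLINE, namespaces),
   and AType as the wider accumulation type (e.g. float when DType is fp16).

     // Hypothetical sketch, not the PR's actual change: gradient kernels
     // receiving the reduced sum as a separate accumulator type AType,
     // mirroring the forward-kernel change above.
     struct softmax_bwd {
       template<typename DType, typename AType>
       MSHADOW_XINLINE static DType Map(DType ograd, DType out, AType sum) {
         // grad = out * (ograd - sum); sum was accumulated in AType (fp32)
         return DType(out * (ograd - sum));
       }
     };

     struct log_softmax_bwd {
       template<typename DType, typename AType>
       MSHADOW_XINLINE static DType Map(DType ograd, DType out, AType sum) {
         // grad = ograd - exp(out) * sum; sum was accumulated in AType (fp32)
         return DType(ograd - expf(out) * sum);
       }
     };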

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services