You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@mxnet.apache.org by jxie@apache.org on 2017/11/02 17:29:15 UTC
[incubator-mxnet] branch master updated: fix makenonlossgrad bug
(#8508)
This is an automated email from the ASF dual-hosted git repository.
jxie pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git
The following commit(s) were added to refs/heads/master by this push:
new 9e0432a fix makenonlossgrad bug (#8508)
9e0432a is described below
commit 9e0432a0e76607e023dbe2dfb42d62fac4bce683
Author: Eric Junyuan Xie <pi...@users.noreply.github.com>
AuthorDate: Thu Nov 2 10:29:12 2017 -0700
fix makenonlossgrad bug (#8508)
---
src/operator/operator_common.h | 4 ++--
src/operator/tensor/broadcast_reduce_op_index.cc | 5 +++--
src/operator/tensor/elemwise_unary_op_basic.cc | 12 ++++++------
3 files changed, 11 insertions(+), 10 deletions(-)
diff --git a/src/operator/operator_common.h b/src/operator/operator_common.h
index 63254e7..875c79c 100644
--- a/src/operator/operator_common.h
+++ b/src/operator/operator_common.h
@@ -412,8 +412,8 @@ inline std::vector<nnvm::NodeEntry> MakeZeroGradNodes(
// check whether all output grads are zero.
inline bool CheckGradAllZero(const std::vector<nnvm::NodeEntry>& ograds) {
- const auto zero_op = nnvm::Op::Get("_zeros");
- const auto zero_like_op = nnvm::Op::Get("zeros_like");
+ static const auto zero_op = nnvm::Op::Get("_zeros");
+ static const auto zero_like_op = nnvm::Op::Get("zeros_like");
if (!ograds.size()) return false;
for (const auto& grad : ograds) {
if (!grad.node) return false;
diff --git a/src/operator/tensor/broadcast_reduce_op_index.cc b/src/operator/tensor/broadcast_reduce_op_index.cc
index 6887955..98cd736 100644
--- a/src/operator/tensor/broadcast_reduce_op_index.cc
+++ b/src/operator/tensor/broadcast_reduce_op_index.cc
@@ -154,8 +154,9 @@ Examples::
.set_attr<FCompute>("FCompute<cpu>", PickOpForward<cpu>)
.set_attr<nnvm::FGradient>("FGradient",
[](const nnvm::NodePtr& n, const std::vector<nnvm::NodeEntry>& ograds) {
- auto ret = MakeNonlossGradNode("_backward_pick", n, ograds,
- {n->inputs[1]}, n->attrs.dict);
+ if (CheckGradAllZero(ograds)) return MakeZeroGradNodes(n, ograds);
+ auto ret = MakeGradNode("_backward_pick", n, {ograds[0], n->inputs[1]},
+ n->attrs.dict);
auto p = MakeNode("zeros_like", n->attrs.name + "_index_backward",
{n->inputs[1]}, nullptr, &n);
ret.emplace_back(nnvm::NodeEntry{p, 0, 0});
diff --git a/src/operator/tensor/elemwise_unary_op_basic.cc b/src/operator/tensor/elemwise_unary_op_basic.cc
index 55690c9..c356c58 100644
--- a/src/operator/tensor/elemwise_unary_op_basic.cc
+++ b/src/operator/tensor/elemwise_unary_op_basic.cc
@@ -241,9 +241,9 @@ NNVM_REGISTER_OP(_identity_with_attr_like_rhs)
.set_attr<nnvm::FGradient>(
"FGradient", [](const nnvm::NodePtr& n,
const std::vector<nnvm::NodeEntry>& ograds) {
- auto lhs = MakeNonlossGradNode(
- "_backward_copy", n, ograds, {},
- std::unordered_map<std::string, std::string>());
+ if (CheckGradAllZero(ograds)) return MakeZeroGradNodes(n, ograds);
+ auto lhs = MakeGradNode("_backward_copy", n, ograds,
+ std::unordered_map<std::string, std::string>());
auto ng = MakeNode("zeros_like", n->attrs.name + "_rhs_backward",
{n->inputs[1]}, nullptr, &n);
lhs.push_back(nnvm::NodeEntry{ng, 0, 0});
@@ -284,9 +284,9 @@ NNVM_REGISTER_OP(reshape_like)
.set_attr<nnvm::FGradient>(
"FGradient", [](const nnvm::NodePtr& n,
const std::vector<nnvm::NodeEntry>& ograds) {
- auto lhs = MakeNonlossGradNode(
- "_backward_copy", n, ograds, {},
- std::unordered_map<std::string, std::string>());
+ if (CheckGradAllZero(ograds)) return MakeZeroGradNodes(n, ograds);
+ auto lhs = MakeGradNode("_backward_copy", n, ograds,
+ std::unordered_map<std::string, std::string>());
auto ng = MakeNode("zeros_like", n->attrs.name + "_rhs_backward",
{n->inputs[1]}, nullptr, &n);
lhs.push_back(nnvm::NodeEntry{ng, 0, 0});
--
To stop receiving notification emails like this one, please contact
["commits@mxnet.apache.org" <commits@mxnet.apache.org>].