You are viewing a plain text version of this content; the canonical (hyperlinked) version is available in the original HTML message.
Posted to commits@mxnet.apache.org by jx...@apache.org on 2017/08/31 06:13:14 UTC
[incubator-mxnet] branch master updated: update epsilon to 1e-12
(#7670)
This is an automated email from the ASF dual-hosted git repository.
jxie pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git
The following commit(s) were added to refs/heads/master by this push:
new 2dbadf0 update epsilon to 1e-12 (#7670)
2dbadf0 is described below
commit 2dbadf0f23a08ba6ba9d0f3b774655b21b84c6dc
Author: Sheng Zha <sz...@users.noreply.github.com>
AuthorDate: Wed Aug 30 23:13:12 2017 -0700
update epsilon to 1e-12 (#7670)
---
python/mxnet/gluon/loss.py | 4 ++--
python/mxnet/metric.py | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/python/mxnet/gluon/loss.py b/python/mxnet/gluon/loss.py
index bb45e89..326399c 100644
--- a/python/mxnet/gluon/loss.py
+++ b/python/mxnet/gluon/loss.py
@@ -191,7 +191,7 @@ class SigmoidBinaryCrossEntropyLoss(Loss):
max_val = F.maximum(-output, 0)
loss = output - output*label + max_val + F.log(F.exp(-max_val)+F.exp(-output-max_val))
else:
- loss = -(F.log(output+1e-8)*label + F.log(1.-output+1e-8)*(1.-label))
+ loss = -(F.log(output+1e-12)*label + F.log(1.-output+1e-12)*(1.-label))
loss = _apply_weighting(F, loss, self._weight, sample_weight)
return F.mean(loss, axis=self._batch_axis, exclude=True)
@@ -294,7 +294,7 @@ class KLDivLoss(Loss):
def hybrid_forward(self, F, output, label, sample_weight=None):
if not self._from_logits:
output = F.log_softmax(output)
- loss = label * (F.log(label+1e-8) - output)
+ loss = label * (F.log(label+1e-12) - output)
loss = _apply_weighting(F, loss, self._weight, sample_weight)
return F.mean(loss, axis=self._batch_axis, exclude=True)
diff --git a/python/mxnet/metric.py b/python/mxnet/metric.py
index a33b00a..1b192f2 100644
--- a/python/mxnet/metric.py
+++ b/python/mxnet/metric.py
@@ -886,7 +886,7 @@ class CrossEntropy(EvalMetric):
>>> print ce.get()
('cross-entropy', 0.57159948348999023)
"""
- def __init__(self, eps=1e-8, name='cross-entropy',
+ def __init__(self, eps=1e-12, name='cross-entropy',
output_names=None, label_names=None):
super(CrossEntropy, self).__init__(
name, eps=eps,
--
To stop receiving notification emails like this one, please contact
['"commits@mxnet.apache.org" <co...@mxnet.apache.org>'].