You are viewing a plain text version of this content; the canonical (linked) version is available in the original mailing-list archive.
Posted to commits@mxnet.apache.org by sk...@apache.org on 2018/09/20 04:53:45 UTC

[incubator-mxnet] branch master updated: fix test_activation by lowering threshold + validate eps for check_numeric_gradient (#12560)

This is an automated email from the ASF dual-hosted git repository.

skm pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git


The following commit(s) were added to refs/heads/master by this push:
     new 97a7457  fix test_activation by lowering threshold + validate eps for check_numeric_gradient (#12560)
97a7457 is described below

commit 97a7457b23e26167c2b7f434be59f391b7e8e25a
Author: Alexander Zai <az...@gmail.com>
AuthorDate: Wed Sep 19 21:53:32 2018 -0700

    fix test_activation by lowering threshold + validate eps for check_numeric_gradient (#12560)
    
    * remove disable flag
    
    * finite difference should use mean
    
    * lower numerical eps
    
    * set threshold to 1e-5
    
    * check numeric_eps
    
    * update assertion
    
    * fix lint
---
 python/mxnet/test_utils.py      | 3 +++
 tests/python/mkl/test_mkldnn.py | 3 +--
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/python/mxnet/test_utils.py b/python/mxnet/test_utils.py
index 63b75cf..49a1a0f 100644
--- a/python/mxnet/test_utils.py
+++ b/python/mxnet/test_utils.py
@@ -828,6 +828,9 @@ def check_numeric_gradient(sym, location, aux_states=None, numeric_eps=1e-3, rto
     ..[1] https://github.com/Theano/Theano/blob/master/theano/gradient.py
     """
     assert dtype in (np.float16, np.float32, np.float64)
+    # cannot use finite differences with small eps without high precision
+    if dtype in (np.float32, np.float16):
+        assert numeric_eps >= 1e-5
     if ctx is None:
         ctx = default_context()
 
diff --git a/tests/python/mkl/test_mkldnn.py b/tests/python/mkl/test_mkldnn.py
index e597d0f5..53e4051 100644
--- a/tests/python/mkl/test_mkldnn.py
+++ b/tests/python/mkl/test_mkldnn.py
@@ -281,7 +281,6 @@ def test_pooling():
         check_pooling_training(stype)
 
 
-@unittest.skip("Flaky test: https://github.com/apache/incubator-mxnet/issues/12377")
 @with_seed()
 def test_activation():
     def check_activation_training(stype):
@@ -292,7 +291,7 @@ def test_activation():
             in_location = [mx.nd.array(data_tmp).tostype(stype)]
 
             test = mx.symbol.Activation(data, act_type="relu")
-            check_numeric_gradient(test, in_location, numeric_eps=1e-2, rtol=0.16, atol=1e-4)
+            check_numeric_gradient(test, in_location, numeric_eps=1e-5, rtol=0.16, atol=1e-4)
 
     stypes = ['row_sparse', 'default']
     for stype in stypes: