Posted to commits@mxnet.apache.org by la...@apache.org on 2021/02/17 18:28:41 UTC

[incubator-mxnet] 01/01: Mark test_masked_softmax as flaky

This is an automated email from the ASF dual-hosted git repository.

lausen pushed a commit to branch leezu-patch-3
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git

commit 71d7661ff3419d91cbffaa79887740cd964e91e1
Author: Leonard Lausen <la...@amazon.com>
AuthorDate: Wed Feb 17 13:26:29 2021 -0500

    Mark test_masked_softmax as flaky
---
 tests/python/unittest/test_operator.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tests/python/unittest/test_operator.py b/tests/python/unittest/test_operator.py
index cc94db5..e4badc0 100644
--- a/tests/python/unittest/test_operator.py
+++ b/tests/python/unittest/test_operator.py
@@ -4974,6 +4974,7 @@ def np_masked_log_softmax_grad(out, grad_out, mask, axis=-1, temperature=1.0):
 @pytest.mark.parametrize('n_broadcast_axis', [0, 1, 2])
 @pytest.mark.parametrize('temperature', [1, 5, 9 ,11])
 @pytest.mark.parametrize('normalize', [True])
+@pytest.mark.flaky
 def test_masked_softmax(dtype, axis, ndims, n_broadcast_axis, temperature, normalize):
     n_broadcast_axis = min(n_broadcast_axis, ndims - 1)
     shape = rand_shape_nd(ndims, dim=10)
@@ -9452,4 +9453,4 @@ def test_zero_sized_dim():
 
     seq_last()
     seq_reverse()
-    seq_mask()
\ No newline at end of file
+    seq_mask()
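
[Editor's note, not part of the commit] The diff above only adds a bare
@pytest.mark.flaky marker; which plugin the MXNet CI uses to honor that
marker is not shown here. As a minimal sketch, assuming the
pytest-rerunfailures plugin is installed, a flaky marker with explicit
rerun counts behaves as follows (test name and failure logic are
hypothetical):

    import random
    import pytest

    # Assumes pytest-rerunfailures: a failing run is retried up to
    # `reruns` times before the test is reported as failed.
    @pytest.mark.flaky(reruns=3)
    def test_sometimes_fails():
        # Fails roughly half the time on any single attempt; with
        # reruns the test only fails if every attempt fails.
        assert random.random() < 0.5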