From 4c0de4d92e9fbe7ad118738f25589f7ecdef3c8b Mon Sep 17 00:00:00 2001 From: Yixin Bao Date: Tue, 10 Sep 2019 08:50:57 +0800 Subject: [PATCH 1/2] avoid testing relu at the origin due to discontinuous gradient --- tests/python/mkl/test_mkldnn.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/python/mkl/test_mkldnn.py b/tests/python/mkl/test_mkldnn.py index 3e623b59977d..2ffe3eaa233d 100644 --- a/tests/python/mkl/test_mkldnn.py +++ b/tests/python/mkl/test_mkldnn.py @@ -337,13 +337,17 @@ def check_pooling_training(stype): def test_activation(): def check_activation_training(stype): for shape in [(2, 3, 3), (2, 3, 2, 2)]: + eps = 1e-5 data_tmp = np.random.normal(-0.1, 1, size=shape) + # Avoid finite difference method inaccuracies due to discontinuous gradient at the origin. + # Here we replace small problematic inputs with 1.0. Repro issue with seed 851486559. + data_tmp[abs(data_tmp) < eps] = 1.0 data = mx.symbol.Variable('data', stype=stype) in_location = [mx.nd.array(data_tmp).tostype(stype)] test = mx.symbol.Activation(data, act_type="relu") - check_numeric_gradient(test, in_location, numeric_eps=1e-5, rtol=0.16, atol=1e-4) + check_numeric_gradient(test, in_location, numeric_eps=eps, rtol=0.16, atol=1e-4) stypes = ['row_sparse', 'default'] for stype in stypes: From 423e0626b3c2b5c9c61a17d14194f64630e7d21d Mon Sep 17 00:00:00 2001 From: JackieWu Date: Thu, 12 Sep 2019 13:21:44 +0800 Subject: [PATCH 2/2] retrigger CI