Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Permalink
Mark test_dropout as flaky (#19553)
Browse files Browse the repository at this point in the history
Two issues.

Issue 1: #14288

Issue 2:

[2020-11-17T06:58:34.678Z]         def check_passthrough(ratio, shape, cudnn_off=True):
[2020-11-17T06:58:34.678Z]             # test inference_mode forward and then backward
[2020-11-17T06:58:34.678Z]             a = mx.random.uniform(shape=shape)
[2020-11-17T06:58:34.678Z]             a.attach_grad()
[2020-11-17T06:58:34.678Z]             with mx.autograd.record(train_mode=False):
[2020-11-17T06:58:34.678Z]                 b = mx.nd.Dropout(a, ratio, cudnn_off=cudnn_off) # dropout acts as identity
[2020-11-17T06:58:34.678Z]             b.backward()
[2020-11-17T06:58:34.678Z]             assert_almost_equal(a.grad.asnumpy(), mx.nd.ones_like(b).asnumpy())
[2020-11-17T06:58:34.678Z]     
[2020-11-17T06:58:34.678Z]         shape = (100, 100)
[2020-11-17T06:58:34.678Z]         check_dropout_ratio(0.5, shape)
[2020-11-17T06:58:34.678Z]         check_dropout_ratio(0.0, shape)
[2020-11-17T06:58:34.678Z] >       check_dropout_ratio(1.0, shape)
[...]
[2020-11-17T06:58:34.678Z]         # Hopefully should be within ratio/2 %
[2020-11-17T06:58:34.678Z]         error = abs(output_sum - input_sum) / input_sum
[2020-11-17T06:58:34.678Z]         if ratio == 1.0:
[2020-11-17T06:58:34.678Z] >           assert output_zeroes == len(input)
[2020-11-17T06:58:34.678Z] E           assert 9999 == 10000
[2020-11-17T06:58:34.678Z] E             +9999
[2020-11-17T06:58:34.678Z] E             -10000
  • Loading branch information
leezu committed Nov 25, 2020
1 parent f97544e commit 16be29d
Showing 1 changed file with 22 additions and 23 deletions.
45 changes: 22 additions & 23 deletions tests/python/unittest/test_operator.py
Original file line number Diff line number Diff line change
Expand Up @@ -6463,8 +6463,7 @@ def test_stack():
check_numeric_gradient(out, inputs)


## TODO: test fails intermittently when cudnn is on. Temporarily disabled cudnn until it gets fixed.
## tracked at https://github.com/apache/incubator-mxnet/issues/14288
@pytest.mark.flaky
def test_dropout():
def zero_count(array, ratio):
zeros = 0
Expand Down Expand Up @@ -6591,18 +6590,18 @@ def check_passthrough(ratio, shape, cudnn_off=True):
check_dropout_ratio(1.0, shape)
check_dropout_ratio(0.75, shape)
check_dropout_ratio(0.25, shape)
# check_dropout_ratio(0.5, shape, cudnn_off=False)
# check_dropout_ratio(0.0, shape, cudnn_off=False)
# check_dropout_ratio(1.0, shape, cudnn_off=False)
# check_dropout_ratio(0.75, shape, cudnn_off=False)
# check_dropout_ratio(0.25, shape, cudnn_off=False)
check_dropout_ratio(0.5, shape, cudnn_off=False)
check_dropout_ratio(0.0, shape, cudnn_off=False)
check_dropout_ratio(1.0, shape, cudnn_off=False)
check_dropout_ratio(0.75, shape, cudnn_off=False)
check_dropout_ratio(0.25, shape, cudnn_off=False)

check_passthrough(0.5, shape)
check_passthrough(0.0, shape)
check_passthrough(1.0, shape)
# check_passthrough(0.5, shape, cudnn_off=False)
# check_passthrough(0.0, shape, cudnn_off=False)
# check_passthrough(1.0, shape, cudnn_off=False)
check_passthrough(0.5, shape, cudnn_off=False)
check_passthrough(0.0, shape, cudnn_off=False)
check_passthrough(1.0, shape, cudnn_off=False)

nshape = (10, 10, 10, 10)
with mx.autograd.train_mode():
Expand All @@ -6619,19 +6618,19 @@ def check_passthrough(ratio, shape, cudnn_off=True):
check_dropout_axes(0.25, nshape, axes = (0, 1, 2))
check_dropout_axes(0.25, nshape, axes = (0, 2, 3))
check_dropout_axes(0.25, nshape, axes = (1, 2, 3))
# check_dropout_axes(0.25, nshape, axes = (0,), cudnn_off=False)
# check_dropout_axes(0.25, nshape, axes = (1,), cudnn_off=False)
# check_dropout_axes(0.25, nshape, axes = (2,), cudnn_off=False)
# check_dropout_axes(0.25, nshape, axes = (3,), cudnn_off=False)
# check_dropout_axes(0.25, nshape, axes = (0, 1), cudnn_off=False)
# check_dropout_axes(0.25, nshape, axes = (0, 2), cudnn_off=False)
# check_dropout_axes(0.25, nshape, axes = (0, 3), cudnn_off=False)
# check_dropout_axes(0.25, nshape, axes = (1, 2), cudnn_off=False)
# check_dropout_axes(0.25, nshape, axes = (1, 3), cudnn_off=False)
# check_dropout_axes(0.25, nshape, axes = (2, 3), cudnn_off=False)
# check_dropout_axes(0.25, nshape, axes = (0, 1, 2), cudnn_off=False)
# check_dropout_axes(0.25, nshape, axes = (0, 2, 3), cudnn_off=False)
# check_dropout_axes(0.25, nshape, axes = (1, 2, 3), cudnn_off=False)
check_dropout_axes(0.25, nshape, axes = (0,), cudnn_off=False)
check_dropout_axes(0.25, nshape, axes = (1,), cudnn_off=False)
check_dropout_axes(0.25, nshape, axes = (2,), cudnn_off=False)
check_dropout_axes(0.25, nshape, axes = (3,), cudnn_off=False)
check_dropout_axes(0.25, nshape, axes = (0, 1), cudnn_off=False)
check_dropout_axes(0.25, nshape, axes = (0, 2), cudnn_off=False)
check_dropout_axes(0.25, nshape, axes = (0, 3), cudnn_off=False)
check_dropout_axes(0.25, nshape, axes = (1, 2), cudnn_off=False)
check_dropout_axes(0.25, nshape, axes = (1, 3), cudnn_off=False)
check_dropout_axes(0.25, nshape, axes = (2, 3), cudnn_off=False)
check_dropout_axes(0.25, nshape, axes = (0, 1, 2), cudnn_off=False)
check_dropout_axes(0.25, nshape, axes = (0, 2, 3), cudnn_off=False)
check_dropout_axes(0.25, nshape, axes = (1, 2, 3), cudnn_off=False)


@pytest.mark.skip(reason="Test fails intermittently. Temporarily disabled until it gets fixed. Tracked at https://github.com/apache/incubator-mxnet/issues/11290")
Expand Down

0 comments on commit 16be29d

Please sign in to comment.