From d7cc49d8b438280bda714a4d3690770738262cce Mon Sep 17 00:00:00 2001
From: Michael Abbott <32575566+mcabbott@users.noreply.github.com>
Date: Sat, 7 Jan 2023 21:00:28 -0500
Subject: [PATCH] un-revert the removal of the active=true method

---
 src/deprecations.jl          | 11 -----------
 test/layers/normalisation.jl |  4 ++--
 2 files changed, 2 insertions(+), 13 deletions(-)

diff --git a/src/deprecations.jl b/src/deprecations.jl
index 27c7bc2264..a99297649b 100644
--- a/src/deprecations.jl
+++ b/src/deprecations.jl
@@ -186,17 +186,6 @@ function update!(opt::Optimise.AbstractOptimiser, ::Params, grads::Union{Tuple,
 end
 
 
-function dropout(rng, x, p; dims=:, active::Bool=true)
-  if active
-    NNlib.dropout(rng, x, p; dims)
-  else
-    Base.depwarn("Flux.dropout(...; active=false) is deprecated. Please branch outside the function, or call dropout(x, 0) if you must.", :dropout)
-    return x
-  end
-end
-dropout(x, p; kwargs...) = dropout(NNlib._rng_from_array(x), x, p; kwargs...)
-
-
 # v0.14 deprecations
 
 # Enable these when 0.14 is released, and delete const ClipGrad = Optimise.ClipValue etc:
diff --git a/test/layers/normalisation.jl b/test/layers/normalisation.jl
index 3385775b2f..6a3d85756d 100644
--- a/test/layers/normalisation.jl
+++ b/test/layers/normalisation.jl
@@ -56,10 +56,10 @@ evalwgrad(f, x...) = pullback(f, x...)[1]
   y = m(x)
   @test count(a->a == 0, y) > 50
 
-  y = Flux.dropout(values(rng_kwargs)..., x, 0.9, active=true)
+  y = Flux.dropout(values(rng_kwargs)..., x, 0.9) # , active=true)
   @test count(a->a == 0, y) > 50
 
-  y = Flux.dropout(values(rng_kwargs)..., x, 0.9, active=false)
+  y = Flux.dropout(values(rng_kwargs)..., x, 0.9 * 0) # , active=false)
   @test count(a->a == 0, y) == 0
 
   # CPU RNGs map onto CPU ok
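
Note (not part of the patch): downstream code that still passes the removed `active` keyword can follow the pattern this patch applies to the tests. A minimal sketch, assuming the plain `Flux.dropout(x, p)` method forwarded to NNlib remains available, as the updated test calls above suggest:

    using Flux

    x = rand(Float32, 100)
    flag = true                           # whatever used to be passed as `active`

    # Previously: y = Flux.dropout(x, 0.9; active=flag)
    # Either branch outside the call, as the old depwarn recommended:
    y = flag ? Flux.dropout(x, 0.9) : x

    # Or keep a single call site and disable dropout via the probability,
    # mirroring the `0.9 * 0` trick in the test change:
    y = Flux.dropout(x, flag ? 0.9 : 0)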