From f5dbc811979f982f16d032b54d6234002de1bde6 Mon Sep 17 00:00:00 2001
From: Xingjian Shi
Date: Thu, 21 May 2020 09:06:47 -0700
Subject: [PATCH] revise activations

---
 python/mxnet/gluon/nn/activations.py      | 18 +++++---
 tests/python/unittest/test_numpy_gluon.py | 50 +++++++++++++++++++++++
 2 files changed, 63 insertions(+), 5 deletions(-)

diff --git a/python/mxnet/gluon/nn/activations.py b/python/mxnet/gluon/nn/activations.py
index 1b9ce91dd2aa..3cccc851e39b 100644
--- a/python/mxnet/gluon/nn/activations.py
+++ b/python/mxnet/gluon/nn/activations.py
@@ -139,7 +139,8 @@ def __init__(self, alpha_initializer=initializer.Constant(0.25),
                                          init=alpha_initializer)
 
     def hybrid_forward(self, F, x, alpha):
-        return F.LeakyReLU(x, gamma=alpha, act_type='prelu', name='fwd')
+        leaky_relu = F.npx.leaky_relu if is_np_array() else F.LeakyReLU
+        return leaky_relu(x, gamma=alpha, act_type='prelu', name='fwd')
 
 
 class ELU(HybridBlock):
@@ -167,7 +168,8 @@ def __init__(self, alpha=1.0, **kwargs):
         self._alpha = alpha
 
     def hybrid_forward(self, F, x):
-        return F.LeakyReLU(x, act_type='elu', slope=self._alpha)
+        leaky_relu = F.npx.leaky_relu if is_np_array() else F.LeakyReLU
+        return leaky_relu(x, act_type='elu', slope=self._alpha)
 
 
 class SELU(HybridBlock):
@@ -187,7 +189,9 @@ def __init__(self, **kwargs):
         super(SELU, self).__init__(**kwargs)
 
     def hybrid_forward(self, F, x):
-        return F.LeakyReLU(x, act_type='selu', name='fwd')
+        leaky_relu = F.npx.leaky_relu if is_np_array() else F.LeakyReLU
+        return leaky_relu(x, act_type='selu', name='fwd')
+
 
 class GELU(HybridBlock):
     r"""
@@ -206,7 +210,8 @@ def __init__(self, **kwargs):
         super(GELU, self).__init__(**kwargs)
 
     def hybrid_forward(self, F, x):
-        return F.LeakyReLU(x, act_type='gelu', name='fwd')
+        leaky_relu = F.npx.leaky_relu if is_np_array() else F.LeakyReLU
+        return leaky_relu(x, act_type='gelu', name='fwd')
 
 
 class Swish(HybridBlock):
@@ -232,4 +237,7 @@ def __init__(self, beta=1.0, **kwargs):
         self._beta = beta
 
     def hybrid_forward(self, F, x):
-        return x * F.sigmoid(self._beta * x, name='fwd')
+        if is_np_array():
+            return x * F.npx.sigmoid(self._beta * x)
+        else:
+            return x * F.sigmoid(self._beta * x, name='fwd')
diff --git a/tests/python/unittest/test_numpy_gluon.py b/tests/python/unittest/test_numpy_gluon.py
index 0d1e5fed59b3..2f2e2e05420d 100644
--- a/tests/python/unittest/test_numpy_gluon.py
+++ b/tests/python/unittest/test_numpy_gluon.py
@@ -25,6 +25,7 @@
 import mxnet as mx
 from mxnet import gluon, autograd, np
 from mxnet.test_utils import use_np, assert_almost_equal, check_gluon_hybridize_consistency
+from mxnet.gluon import nn
 from common import with_seed
 import random
 
@@ -422,6 +423,55 @@ def hybrid_forward(self, F, valid_length):
     assert mx.test_utils.same(out1.asnumpy(), out2.asnumpy())
 
 
+@with_seed()
+@use_np
+def test_activations_leakyrelu():
+    # Currently, the activation tests only check that the layers are runnable.
+    act_layer = nn.LeakyReLU(0.1)
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
+
+@with_seed()
+@use_np
+def test_activations_prelu():
+    act_layer = nn.PReLU()
+    act_layer.initialize()
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
+
+@with_seed()
+@use_np
+def test_activations_elu():
+    act_layer = nn.ELU(1.0)
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
+
+@with_seed()
+@use_np
+def test_activations_selu():
+    act_layer = nn.SELU()
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
+
+@with_seed()
+@use_np
+def test_activations_gelu():
+    act_layer = nn.GELU()
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
+
+@with_seed()
+@use_np
+def test_activations_swish():
+    act_layer = nn.Swish()
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
 if __name__ == '__main__':
     import nose
     nose.runmodule()
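
A minimal usage sketch, not part of the patch itself: it assumes an MXNet build containing these changes and shows the npx dispatch path being exercised once numpy semantics are switched on with npx.set_np().

    import mxnet as mx
    from mxnet import npx
    from mxnet.gluon import nn

    npx.set_np()      # enable numpy semantics so is_np_array() is True in hybrid_forward
    act = nn.Swish(beta=1.0)
    act.hybridize()   # also exercise the symbolic branch of the dispatch
    y = act(mx.np.random.uniform(size=(4, 5)))
    print(y.shape)    # (4, 5)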