This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

[Numpy] Fix gluon activations (#18370)
* fix activation numpy bug

* Update test_numpy_gluon.py

* fix
sxjscience committed May 21, 2020
1 parent 67b5d31 commit 5343aef
Showing 2 changed files with 63 additions and 5 deletions.
18 changes: 13 additions & 5 deletions python/mxnet/gluon/nn/activations.py
@@ -139,7 +139,8 @@ def __init__(self, alpha_initializer=initializer.Constant(0.25),
                                          init=alpha_initializer)
 
     def hybrid_forward(self, F, x, alpha):
-        return F.LeakyReLU(x, gamma=alpha, act_type='prelu', name='fwd')
+        leaky_relu = F.npx.leaky_relu if is_np_array() else F.LeakyReLU
+        return leaky_relu(x, gamma=alpha, act_type='prelu', name='fwd')
 
 
 class ELU(HybridBlock):
@@ -167,7 +168,8 @@ def __init__(self, alpha=1.0, **kwargs):
         self._alpha = alpha
 
     def hybrid_forward(self, F, x):
-        return F.LeakyReLU(x, act_type='elu', slope=self._alpha)
+        leaky_relu = F.npx.leaky_relu if is_np_array() else F.LeakyReLU
+        return leaky_relu(x, act_type='elu', slope=self._alpha)
 
 
 class SELU(HybridBlock):
@@ -187,7 +189,9 @@ def __init__(self, **kwargs):
         super(SELU, self).__init__(**kwargs)
 
     def hybrid_forward(self, F, x):
-        return F.LeakyReLU(x, act_type='selu', name='fwd')
+        leaky_relu = F.npx.leaky_relu if is_np_array() else F.LeakyReLU
+        return leaky_relu(x, act_type='selu', name='fwd')
+
 
 class GELU(HybridBlock):
     r"""
@@ -206,7 +210,8 @@ def __init__(self, **kwargs):
         super(GELU, self).__init__(**kwargs)
 
     def hybrid_forward(self, F, x):
-        return F.LeakyReLU(x, act_type='gelu', name='fwd')
+        leaky_relu = F.npx.leaky_relu if is_np_array() else F.LeakyReLU
+        return leaky_relu(x, act_type='gelu', name='fwd')
 
 
 class Swish(HybridBlock):
@@ -232,4 +237,7 @@ def __init__(self, beta=1.0, **kwargs):
         self._beta = beta
 
     def hybrid_forward(self, F, x):
-        return x * F.sigmoid(self._beta * x, name='fwd')
+        if is_np_array():
+            return x * F.npx.sigmoid(self._beta * x)
+        else:
+            return x * F.sigmoid(self._beta * x, name='fwd')
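
The fix is the same in every activation: pick the numpy-extension operator when the numpy front end is active, otherwise fall back to the legacy ndarray/symbol operator. The standalone sketch below is not part of the commit; the ToyELU name and the use of mx.npx.set_np() are illustrative assumptions, but it shows the same is_np_array() dispatch in a minimal HybridBlock.

# Minimal sketch of the dispatch pattern used in the diff above (illustrative only).
import mxnet as mx
from mxnet.gluon import HybridBlock
from mxnet.util import is_np_array  # same helper the gluon blocks consult


class ToyELU(HybridBlock):
    """Hypothetical block mirroring nn.ELU: route to F.npx when numpy arrays are active."""

    def __init__(self, alpha=1.0, **kwargs):
        super(ToyELU, self).__init__(**kwargs)
        self._alpha = alpha

    def hybrid_forward(self, F, x):
        # In numpy mode, use F.npx.leaky_relu; otherwise the legacy F.LeakyReLU.
        leaky_relu = F.npx.leaky_relu if is_np_array() else F.LeakyReLU
        return leaky_relu(x, act_type='elu', slope=self._alpha)


if __name__ == '__main__':
    mx.npx.set_np()  # enable numpy semantics globally (the tests below use @use_np instead)
    block = ToyELU(alpha=1.0)
    block.initialize()
    print(block(mx.np.random.uniform(size=(4,))))
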
50 changes: 50 additions & 0 deletions tests/python/unittest/test_numpy_gluon.py
@@ -25,6 +25,7 @@
 import mxnet as mx
 from mxnet import gluon, autograd, np
 from mxnet.test_utils import use_np, assert_almost_equal, check_gluon_hybridize_consistency
+from mxnet.gluon import nn
 from common import with_seed
 import random
 
@@ -431,3 +432,52 @@ def hybrid_forward(self, F, valid_length):
 
     assert mx.test_utils.same(out1.asnumpy(), out2.asnumpy())
 
+
+@with_seed()
+@use_np
+def test_activations_leakyrelu():
+    # Currently, for all the activation tests, we just check that they run.
+    act_layer = nn.LeakyReLU(0.1)
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
+
+@with_seed()
+@use_np
+def test_activations_prelu():
+    act_layer = nn.PReLU()
+    act_layer.initialize()
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
+
+@with_seed()
+@use_np
+def test_activations_elu():
+    act_layer = nn.ELU(1.0)
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
+
+@with_seed()
+@use_np
+def test_activations_selu():
+    act_layer = nn.SELU()
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
+
+@with_seed()
+@use_np
+def test_activations_gelu():
+    act_layer = nn.GELU()
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
+
+@with_seed()
+@use_np
+def test_activations_swish():
+    act_layer = nn.Swish()
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
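
The same smoke checks can be exercised outside the test harness by enabling numpy semantics globally instead of via the @use_np decorator. This is a sketch under the assumption that a build containing this commit is installed; the loop below is illustrative and not part of the commit.

# Standalone repro of the new smoke tests (illustrative; not part of the commit).
import mxnet as mx
from mxnet.gluon import nn

mx.npx.set_np()  # roughly what @use_np does per test, applied process-wide

for act_layer in (nn.LeakyReLU(0.1), nn.PReLU(), nn.ELU(1.0),
                  nn.SELU(), nn.GELU(), nn.Swish()):
    act_layer.initialize()  # only PReLU has parameters; a no-op for the others
    out = act_layer(mx.np.random.uniform(size=(10,)))
    out.asnumpy()  # force computation, as the tests do
    print(type(act_layer).__name__, out.shape)
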
