This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

[v1.7] Backport Gluon Activations Fix #18370 #18700

Merged (1 commit, Jul 14, 2020)
18 changes: 13 additions & 5 deletions python/mxnet/gluon/nn/activations.py
@@ -139,7 +139,8 @@ def __init__(self, alpha_initializer=initializer.Constant(0.25),
                                          init=alpha_initializer)
 
     def hybrid_forward(self, F, x, alpha):
-        return F.LeakyReLU(x, gamma=alpha, act_type='prelu', name='fwd')
+        leaky_relu = F.npx.leaky_relu if is_np_array() else F.LeakyReLU
+        return leaky_relu(x, gamma=alpha, act_type='prelu', name='fwd')
 
 
 class ELU(HybridBlock):
@@ -167,7 +168,8 @@ def __init__(self, alpha=1.0, **kwargs):
         self._alpha = alpha
 
     def hybrid_forward(self, F, x):
-        return F.LeakyReLU(x, act_type='elu', slope=self._alpha)
+        leaky_relu = F.npx.leaky_relu if is_np_array() else F.LeakyReLU
+        return leaky_relu(x, act_type='elu', slope=self._alpha)
 
 
 class SELU(HybridBlock):
@@ -187,7 +189,9 @@ def __init__(self, **kwargs):
         super(SELU, self).__init__(**kwargs)
 
     def hybrid_forward(self, F, x):
-        return F.LeakyReLU(x, act_type='selu', name='fwd')
+        leaky_relu = F.npx.leaky_relu if is_np_array() else F.LeakyReLU
+        return leaky_relu(x, act_type='selu', name='fwd')
+
 
 class GELU(HybridBlock):
     r"""
@@ -206,7 +210,8 @@ def __init__(self, **kwargs):
         super(GELU, self).__init__(**kwargs)
 
     def hybrid_forward(self, F, x):
-        return F.LeakyReLU(x, act_type='gelu', name='fwd')
+        leaky_relu = F.npx.leaky_relu if is_np_array() else F.LeakyReLU
+        return leaky_relu(x, act_type='gelu', name='fwd')
 
 
 class Swish(HybridBlock):
@@ -232,4 +237,7 @@ def __init__(self, beta=1.0, **kwargs):
         self._beta = beta
 
     def hybrid_forward(self, F, x):
-        return x * F.sigmoid(self._beta * x, name='fwd')
+        if is_np_array():
+            return x * F.npx.sigmoid(self._beta * x)
+        else:
+            return x * F.sigmoid(self._beta * x, name='fwd')
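
The pattern in these hunks picks the NumPy-compatible operator when is_np_array() is True and falls back to the legacy ndarray/symbol API otherwise, which is the same dispatch Gluon uses in its other blocks. Below is a minimal usage sketch of the patched layers under NumPy semantics; the npx.set_np() call, the choice of PReLU, and the input shape are illustrative assumptions, not part of this PR:

import mxnet as mx
from mxnet import npx
from mxnet.gluon import nn

# Enable NumPy semantics: Gluon blocks now receive mx.np.ndarray inputs
# and is_np_array() evaluates to True inside hybrid_forward.
npx.set_np()

act = nn.PReLU()   # PReLU carries a learnable alpha, so it must be initialized
act.initialize()

x = mx.np.random.uniform(size=(2, 3))
y = act(x)         # dispatches to F.npx.leaky_relu with act_type='prelu'
print(y.shape)     # (2, 3)
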
50 changes: 50 additions & 0 deletions tests/python/unittest/test_numpy_gluon.py
@@ -25,6 +25,7 @@
 import mxnet as mx
 from mxnet import gluon, autograd, np
 from mxnet.test_utils import use_np, assert_almost_equal, check_gluon_hybridize_consistency
+from mxnet.gluon import nn
 from common import with_seed
 import random
 
@@ -422,6 +423,55 @@ def hybrid_forward(self, F, valid_length):
     assert mx.test_utils.same(out1.asnumpy(), out2.asnumpy())
 
 
+@with_seed()
+@use_np
+def test_activations_leakyrelu():
+    # Currently, all activation tests just check that the layer is runnable.
+    act_layer = nn.LeakyReLU(0.1)
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
+
+@with_seed()
+@use_np
+def test_activations_prelu():
+    act_layer = nn.PReLU()
+    act_layer.initialize()
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
+
+@with_seed()
+@use_np
+def test_activations_elu():
+    act_layer = nn.ELU(1.0)
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
+
+@with_seed()
+@use_np
+def test_activations_selu():
+    act_layer = nn.SELU()
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
+
+@with_seed()
+@use_np
+def test_activations_gelu():
+    act_layer = nn.GELU()
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
+
+@with_seed()
+@use_np
+def test_activations_swish():
+    act_layer = nn.Swish()
+    out = act_layer(mx.np.random.uniform(size=(10,)))
+    out.asnumpy()
+
 if __name__ == '__main__':
     import nose
     nose.runmodule()
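
To exercise only the new tests outside of CI, a sketch like the following should work; the programmatic nose invocation, the repo-root working directory, and the two selected test names are assumptions about the local setup rather than part of this PR:

# Sketch: run a subset of the new activation tests with nose, assuming a
# source checkout with this backport applied and nose installed.
import nose

nose.run(argv=[
    'nosetests', '-v',
    'tests/python/unittest/test_numpy_gluon.py:test_activations_prelu',
    'tests/python/unittest/test_numpy_gluon.py:test_activations_swish',
])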