Skip to content

Commit

Permalink
[BUGFIX] Fix the ELU function producing NaN when calculating the gradient (apache#14673)
Browse files Browse the repository at this point in the history

* fix ELU

* fix

* fix

* fix

* fix

* fix
  • Loading branch information
fierceX authored and haohuw committed Jun 23, 2019
1 parent 0d647dd commit a0b57b0
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 2 deletions.
3 changes: 2 additions & 1 deletion python/mxnet/gluon/nn/activations.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,12 +153,13 @@ class ELU(HybridBlock):
Outputs:
- **out**: output tensor with the same shape as `data`.
"""

def __init__(self, alpha=1.0, **kwargs):
    """Create an ELU activation block.

    Parameters
    ----------
    alpha : float, default 1.0
        Slope applied to the negative part of the input
        (i.e. ``alpha * (exp(x) - 1)`` for ``x <= 0``).
    **kwargs
        Forwarded unchanged to the ``HybridBlock`` constructor.
    """
    super(ELU, self).__init__(**kwargs)
    # Stored for use in hybrid_forward; leading underscore marks it internal.
    self._alpha = alpha

def hybrid_forward(self, F, x):
return F.where(x > 0, x, self._alpha * (F.exp(x) - 1.0))
return F.LeakyReLU(x, act_type='elu', slope=self._alpha)


class SELU(HybridBlock):
Expand Down
2 changes: 1 addition & 1 deletion tests/python/unittest/test_gluon.py
Original file line number Diff line number Diff line change
Expand Up @@ -1180,7 +1180,7 @@ def swish_test(x):
elu = mx.gluon.nn.ELU()
def elu_test(x):
    """Reference ELU (alpha = 1.0) computed element-wise for comparison
    against mx.gluon.nn.ELU output."""
    def elu(x):
        # expm1(x) == exp(x) - 1 computed without catastrophic cancellation
        # near zero; matches the operator's alpha=1.0 default. The boundary
        # x == 0 yields 0 on either branch, so `<= 0.0` is safe.
        return mx.nd.expm1(x) if x <= 0.0 else x
    return [elu(x_i) for x_i in x]

for test_point, ref_point in zip(elu_test(point_to_validate), elu(point_to_validate)):
Expand Down

0 comments on commit a0b57b0

Please sign in to comment.