remove mx.rnn APIs (apache#18507)
* remove mx.rnn APIs

* fix test

* update test

Co-authored-by: Ubuntu <[email protected]>
Co-authored-by: Lin <[email protected]>
3 people authored and AntiZpvoh committed Jul 6, 2020
1 parent 1e5c3cc commit 4498ebf
Showing 14 changed files with 0 additions and 3,256 deletions.
1 change: 0 additions & 1 deletion python/mxnet/__init__.py
@@ -84,7 +84,6 @@
 
 from . import test_utils
 
-from . import rnn
 from . import gluon
 
 from . import _deferred_compute
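With the top-level `mx.rnn` namespace removed, the Gluon RNN layers (still imported above) are the remaining path for recurrent models. A minimal migration sketch, assuming a fused LSTM workload moves to `mxnet.gluon.rnn.LSTM`; the layer sizes and shapes below are illustrative, not taken from this diff:

import mxnet as mx
from mxnet import gluon, nd

# Hypothetical port of a fused LSTM stack to Gluon (sizes are illustrative):
# 2 layers, 256 hidden units, batch-major (NTC) input.
lstm = gluon.rnn.LSTM(hidden_size=256, num_layers=2, layout='NTC')
lstm.initialize()

x = nd.random.uniform(shape=(32, 10, 128))   # (batch, seq_len, features)
out = lstm(x)                                # -> (32, 10, 256)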
54 changes: 0 additions & 54 deletions python/mxnet/initializer.py
@@ -713,57 +713,3 @@ def _init_weight(self, name, arr):
         # gate of the 4 LSTM gates, we modify the according values.
         num_hidden = int(arr.shape[0] / 4)
         arr[num_hidden:2*num_hidden] = self.forget_bias
-
-
-@register
-class FusedRNN(Initializer):
-    """Initialize parameters for fused rnn layers.
-
-    Parameters
-    ----------
-    init : Initializer
-        initializer applied to unpacked weights. Fall back to global
-        initializer if None.
-    num_hidden : int
-        should be the same with arguments passed to FusedRNNCell.
-    num_layers : int
-        should be the same with arguments passed to FusedRNNCell.
-    mode : str
-        should be the same with arguments passed to FusedRNNCell.
-    bidirectional : bool
-        should be the same with arguments passed to FusedRNNCell.
-    forget_bias : float
-        should be the same with arguments passed to FusedRNNCell.
-    """
-    def __init__(self, init, num_hidden, num_layers, mode, bidirectional=False, forget_bias=1.0):
-        if isinstance(init, string_types):
-            klass, kwargs = json.loads(init)
-            init = registry._REGISTRY[klass.lower()](**kwargs)
-        super(FusedRNN, self).__init__(init=init.dumps() if init is not None else None,
-                                       num_hidden=num_hidden, num_layers=num_layers, mode=mode,
-                                       bidirectional=bidirectional, forget_bias=forget_bias)
-        self._init = init
-        self._num_hidden = num_hidden
-        self._num_layers = num_layers
-        self._mode = mode
-        self._bidirectional = bidirectional
-        self._forget_bias = forget_bias
-
-    def _init_weight(self, desc, arr): # pylint: disable=arguments-differ
-        from .rnn import rnn_cell
-        cell = rnn_cell.FusedRNNCell(self._num_hidden, self._num_layers,
-                                     self._mode, self._bidirectional,
-                                     forget_bias=self._forget_bias, prefix='')
-        args = cell.unpack_weights({'parameters': arr})
-        for name in args:
-            arg_desc = InitDesc(name, global_init=desc.global_init)
-            # for lstm bias, we use a custom initializer
-            # which adds a bias to the forget gate
-            if self._mode == 'lstm' and name.endswith("_f_bias"):
-                args[name][:] = self._forget_bias
-            elif self._init is None:
-                desc.global_init(arg_desc, args[name])
-            else:
-                self._init(arg_desc, args[name])
-
-        arr[:] = cell.pack_weights(args)['parameters']
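For context on what was removed: `_init_weight` above unpacks the single fused parameter blob into per-gate arrays, initializes each one (special-casing the LSTM forget-gate bias), and packs them back into the cuDNN-style fused layout. A usage sketch against the pre-removal API, with illustrative sizes; none of this works after this commit:

import mxnet as mx

# Pre-removal pairing of a fused cell with its matching initializer.
# The initializer's constructor arguments had to mirror those of the cell,
# so the packed blob could be unpacked, initialized per-gate, and repacked.
cell = mx.rnn.FusedRNNCell(num_hidden=256, num_layers=2, mode='lstm')
init = mx.init.FusedRNN(mx.init.Xavier(), num_hidden=256, num_layers=2,
                        mode='lstm', forget_bias=1.0)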
29 changes: 0 additions & 29 deletions python/mxnet/rnn/__init__.py

This file was deleted.

210 changes: 0 additions & 210 deletions python/mxnet/rnn/io.py

This file was deleted.

(remaining 10 changed files not shown)
