From d0cc5a677fe7f077f7be01112e9d3a7e4baa2468 Mon Sep 17 00:00:00 2001
From: Frank Liu
Date: Wed, 7 Nov 2018 21:31:55 -0800
Subject: [PATCH] Fix Sphinx docstring formatting error. (#13004, #13005, #13006)

---
 python/mxnet/gluon/rnn/rnn_cell.py |  6 +++---
 python/mxnet/rnn/rnn_cell.py       | 18 +++++++++---------
 2 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/python/mxnet/gluon/rnn/rnn_cell.py b/python/mxnet/gluon/rnn/rnn_cell.py
index b57dc935af83..98e96fc6da17 100644
--- a/python/mxnet/gluon/rnn/rnn_cell.py
+++ b/python/mxnet/gluon/rnn/rnn_cell.py
@@ -333,7 +333,7 @@ class RNNCell(HybridRecurrentCell):
         Initializer for the bias vector.
     h2h_bias_initializer : str or Initializer, default 'zeros'
         Initializer for the bias vector.
-    prefix : str, default 'rnn_'
+    prefix : str, default ``'rnn_'``
         Prefix for name of `Block`s
         (and name of weight if params is `None`).
     params : Parameter or None
@@ -440,7 +440,7 @@ class LSTMCell(HybridRecurrentCell):
         Initializer for the bias vector.
     h2h_bias_initializer : str or Initializer, default 'zeros'
         Initializer for the bias vector.
-    prefix : str, default 'lstm_'
+    prefix : str, default ``'lstm_'``
         Prefix for name of `Block`s
         (and name of weight if params is `None`).
     params : Parameter or None, default None
@@ -565,7 +565,7 @@ class GRUCell(HybridRecurrentCell):
         Initializer for the bias vector.
     h2h_bias_initializer : str or Initializer, default 'zeros'
         Initializer for the bias vector.
-    prefix : str, default 'gru_'
+    prefix : str, default ``'gru_'``
         prefix for name of `Block`s
         (and name of weight if params is `None`).
     params : Parameter or None, default None
diff --git a/python/mxnet/rnn/rnn_cell.py b/python/mxnet/rnn/rnn_cell.py
index 3301102ba905..9097cbae728d 100644
--- a/python/mxnet/rnn/rnn_cell.py
+++ b/python/mxnet/rnn/rnn_cell.py
@@ -368,7 +368,7 @@ class RNNCell(BaseRNNCell):
         Number of units in output symbol.
     activation : str or Symbol, default 'tanh'
         Type of activation function. Options are 'relu' and 'tanh'.
-    prefix : str, default 'rnn_'
+    prefix : str, default ``'rnn_'``
         Prefix for name of layers (and name of weight if params is None).
     params : RNNParams, default None
         Container for weight sharing between cells. Created if None.
@@ -412,7 +412,7 @@ class LSTMCell(BaseRNNCell):
     ----------
     num_hidden : int
         Number of units in output symbol.
-    prefix : str, default 'lstm_'
+    prefix : str, default ``'lstm_'``
         Prefix for name of layers (and name of weight if params is None).
     params : RNNParams, default None
         Container for weight sharing between cells. Created if None.
@@ -475,7 +475,7 @@ class GRUCell(BaseRNNCell):
     ----------
     num_hidden : int
         Number of units in output symbol.
-    prefix : str, default 'gru_'
+    prefix : str, default ``'gru_'``
         Prefix for name of layers (and name of weight if params is None).
     params : RNNParams, default None
         Container for weight sharing between cells. Created if None.
@@ -554,7 +554,7 @@ class FusedRNNCell(BaseRNNCell):
         Whether to return the states that can be used as starting states next time.
     forget_bias : bias added to forget gate, default 1.0.
         Jozefowicz et al. 2015 recommends setting this to 1.0
-    prefix : str, default '$mode_' such as 'lstm_'
+    prefix : str, default ``'$mode_'`` such as ``'lstm_'``
         Prefix for names of layers
         (this prefix is also used for names of weights if `params` is None
         i.e. if `params` are being created and not reused)
@@ -832,7 +832,7 @@ class DropoutCell(BaseRNNCell):
     dropout : float
         Percentage of elements to drop out, which
         is 1 - percentage to retain.
-    prefix : str, default 'dropout_'
+    prefix : str, default ``'dropout_'``
         Prefix for names of layers
         (this prefix is also used for names of weights if `params` is None
         i.e. if `params` are being created and not reused)
@@ -1007,7 +1007,7 @@ class BidirectionalCell(BaseRNNCell):
     params : RNNParams, default None.
         Container for weight sharing between cells.
         A new RNNParams container is created if `params` is None.
-    output_prefix : str, default 'bi_'
+    output_prefix : str, default ``'bi_'``
         prefix for name of output
     """
     def __init__(self, l_cell, r_cell, params=None, output_prefix='bi_'):
@@ -1207,7 +1207,7 @@ class ConvRNNCell(BaseConvRNNCell):
     activation : str or Symbol,
         default functools.partial(symbol.LeakyReLU, act_type='leaky', slope=0.2)
         Type of activation function.
-    prefix : str, default 'ConvRNN_'
+    prefix : str, default ``'ConvRNN_'``
         Prefix for name of layers (and name of weight if params is None).
     params : RNNParams, default None
         Container for weight sharing between cells. Created if None.
@@ -1287,7 +1287,7 @@ class ConvLSTMCell(BaseConvRNNCell):
     activation : str or Symbol
         default functools.partial(symbol.LeakyReLU, act_type='leaky', slope=0.2)
         Type of activation function.
-    prefix : str, default 'ConvLSTM_'
+    prefix : str, default ``'ConvLSTM_'``
         Prefix for name of layers (and name of weight if params is None).
     params : RNNParams, default None
         Container for weight sharing between cells. Created if None.
@@ -1379,7 +1379,7 @@ class ConvGRUCell(BaseConvRNNCell):
     activation : str or Symbol,
         default functools.partial(symbol.LeakyReLU, act_type='leaky', slope=0.2)
         Type of activation function.
-    prefix : str, default 'ConvGRU_'
+    prefix : str, default ``'ConvGRU_'``
         Prefix for name of layers (and name of weight if params is None).
     params : RNNParams, default None
         Container for weight sharing between cells. Created if None.
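
For reference, a minimal usage sketch of the `prefix` argument that these docstrings describe, assuming the MXNet 1.x Gluon API touched by this patch; the `enc_lstm_` prefix, hidden size, and input shapes below are illustrative only and not part of the change:

    # Minimal sketch, assuming the MXNet 1.x Gluon API documented above.
    import mxnet as mx
    from mxnet.gluon import rnn

    # The patched docstrings cover defaults such as ``'rnn_'`` and ``'lstm_'``;
    # an explicit, hypothetical prefix is passed here purely for illustration.
    cell = rnn.LSTMCell(hidden_size=100, prefix='enc_lstm_')
    cell.initialize()

    x = mx.nd.random.uniform(shape=(32, 50))    # (batch_size, input_size)
    states = cell.begin_state(batch_size=32)    # zero-initialized [h, c]
    output, new_states = cell(x, states)

    print(cell.prefix)   # enc_lstm_
    print(output.shape)  # (32, 100)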