diff --git a/nn/conformer.py b/nn/conformer.py
index f10a451a..d810990f 100644
--- a/nn/conformer.py
+++ b/nn/conformer.py
@@ -131,7 +131,7 @@ def __init__(
         :param att_dropout: attention dropout value
         :param out_dim: the output feature dimension
         :param num_heads: the number of attention heads
-        :param batch_norm_opts: batch norm options
+        :param batch_norm_opts: passed to :class:`nn.BatchNorm`
         """
         super().__init__()
@@ -181,7 +181,7 @@ class ConformerEncoder(nn.Module):
 
     def __init__(self, encoder_layer: ConformerEncoderLayer, num_layers: int):
         """
-        :param encoder_layer: an instance of `class:ConformerEncoderLayer`
+        :param encoder_layer: an instance of :class:`ConformerEncoderLayer`
        :param num_layers: the number of encoder layers
         """
         super().__init__()
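
A minimal usage sketch for the two constructors touched above. Only the ConformerEncoder signature (encoder_layer, num_layers) is taken verbatim from the second hunk; the import path, the ConformerEncoderLayer keyword arguments, and the dimension helper are assumptions for illustration, not part of this diff.

    from returnn_common import nn  # import path is an assumption

    # Build one encoder layer. Keyword arguments beyond those named in the
    # docstrings above (out_dim, num_heads, att_dropout, batch_norm_opts)
    # are hypothetical; batch_norm_opts is left at its default here and
    # would be forwarded to nn.BatchNorm per the updated docstring.
    layer = nn.ConformerEncoderLayer(
        out_dim=nn.FeatureDim("conformer-out", 256),  # assumed dim helper
        num_heads=4,
        att_dropout=0.1,
    )

    # Stack the layer num_layers times, matching the documented signature.
    encoder = nn.ConformerEncoder(encoder_layer=layer, num_layers=12)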