
Commit

[cif] remove concat after in cif (#1763)
robin1001 authored Mar 20, 2023
1 parent a95baf0 commit d3fd9b9
Showing 2 changed files with 1 addition and 25 deletions.
13 changes: 1 addition & 12 deletions wenet/cif/cif_decoder.py
@@ -48,11 +48,6 @@ class BaseDecoder(nn.Module):
use_output_layer: whether to use output layer
pos_enc_class: PositionalEncoding or ScaledPositionalEncoding
normalize_before: whether to use layer_norm before the first block
- concat_after: whether to concat attention layer's input and output
-     if True, additional linear will be applied.
-     i.e. x -> x + linear(concat(x, att(x)))
-     if False, no additional linear will be applied.
-     i.e. x -> x + att(x)
"""

def __init__(
@@ -177,7 +172,6 @@ def __init__(
use_output_layer: bool = True,
pos_enc_class=PositionalEncoding,
normalize_before: bool = True,
- concat_after: bool = False,
embeds_id: int = -1,
):
assert check_argument_types()
@@ -205,8 +199,7 @@ def __init__(
PositionwiseFeedForward(attention_dim, linear_units,
dropout_rate),
dropout_rate,
- normalize_before,
- concat_after)
+ normalize_before)
for _ in range(num_blocks)
])

@@ -294,7 +287,6 @@ def __init__(
use_output_layer: bool = True,
pos_enc_class=PositionalEncoding,
normalize_before: bool = True,
- concat_after: bool = False,
att_layer_num: int = 6,
kernel_size: int = 21,
sanm_shfit: int = 0
@@ -355,7 +347,6 @@ def __init__(
dropout_rate),
dropout_rate,
normalize_before,
- concat_after,
) for _ in range(att_layer_num)
])
if num_blocks - att_layer_num <= 0:
@@ -374,7 +365,6 @@ def __init__(
dropout_rate),
dropout_rate,
normalize_before,
- concat_after,
) for _ in range(num_blocks - att_layer_num)
])
self.decoders3 = torch.nn.ModuleList([
@@ -386,7 +376,6 @@ def __init__(
dropout_rate),
dropout_rate,
normalize_before,
- concat_after,
) for _ in range(1)
])

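For context, the docstring removed above described concat_after as choosing between two residual forms: x -> x + linear(concat(x, att(x))) when enabled, and the plain x -> x + att(x) otherwise. A minimal, hypothetical PyTorch sketch of the two variants (placeholder names, not code from this repository):

import torch
import torch.nn as nn

# Hypothetical illustration only: `size`, `att`, and `concat_linear` are
# placeholder names, not WeNet identifiers.
size = 256
att = nn.MultiheadAttention(size, num_heads=4, batch_first=True)
concat_linear = nn.Linear(size + size, size)

x = torch.randn(2, 10, size)              # (batch, time, feature)
att_out, _ = att(x, x, x)

# concat_after=True (the option this commit removes):
#     x -> x + linear(concat(x, att(x)))
y_concat = x + concat_linear(torch.cat((x, att_out), dim=-1))

# concat_after=False (the only remaining behaviour):
#     x -> x + att(x)
y_plain = x + att_out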
13 changes: 0 additions & 13 deletions wenet/cif/decoder_layer.py
@@ -34,11 +34,6 @@ class DecoderLayerSANM(nn.Module):
dropout_rate (float): Dropout rate.
normalize_before (bool): Whether to use layer_norm before the first
block.
- concat_after (bool): Whether to concat attention layer's input and
-     output.
-     if True, additional linear will be applied.
-     i.e. x -> x + linear(concat(x, att(x)))
-     if False, no additional linear will be applied. i.e. x -> x + att(x)
"""

def __init__(
@@ -49,7 +44,6 @@ def __init__(
feed_forward: nn.Module,
dropout_rate: float,
normalize_before: bool = True,
- concat_after: bool = False,
):
"""Construct an DecoderLayer object."""
super(DecoderLayerSANM, self).__init__()
@@ -64,13 +58,6 @@ def __init__(
self.norm3 = nn.LayerNorm(size, eps=1e-12)
self.dropout = nn.Dropout(dropout_rate)
self.normalize_before = normalize_before
- self.concat_after = concat_after
- if self.concat_after:
-     self.concat_linear1 = nn.Linear(size + size, size)
-     self.concat_linear2 = nn.Linear(size + size, size)
- else:
-     self.concat_linear1 = nn.Identity()
-     self.concat_linear2 = nn.Identity()

def forward(
self,
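After this commit, the layer keeps only the plain residual path and the concat_linear modules are gone. A rough, self-contained sketch of such a simplified residual layer, assuming the forward pass follows the x -> x + att(x) form from the removed docstring (the actual DecoderLayerSANM forward is not shown in this diff):

import torch
import torch.nn as nn

# Hypothetical toy layer mirroring the simplified structure after this commit:
# only the plain residual x -> x + att(x) remains. This is NOT the real
# DecoderLayerSANM, whose attention modules and forward signature differ.
class ToyResidualLayer(nn.Module):
    def __init__(self, size: int, dropout_rate: float,
                 normalize_before: bool = True):
        super().__init__()
        self.self_attn = nn.MultiheadAttention(size, num_heads=4,
                                               batch_first=True)
        self.norm1 = nn.LayerNorm(size, eps=1e-12)
        self.dropout = nn.Dropout(dropout_rate)
        self.normalize_before = normalize_before

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        residual = x
        if self.normalize_before:
            x = self.norm1(x)
        att_out, _ = self.self_attn(x, x, x)
        return residual + self.dropout(att_out)  # no concat_linear branch

x = torch.randn(2, 10, 256)
print(ToyResidualLayer(256, 0.1)(x).shape)  # torch.Size([2, 10, 256])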
