
Commit

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Dec 6, 2022
1 parent d6391d4 commit b62b234
Showing 1 changed file with 2 additions and 2 deletions.
nemo/collections/nlp/modules/common/megatron/transformer.py (2 additions, 2 deletions)
@@ -1528,7 +1528,7 @@ def __init__(
             gradient_accumulation_fusion=gradient_accumulation_fusion,
             dropout=ffn_dropout,
         )
-
+
     def _get_bias_droput_add_func(self, transformer_block_type='pre_ln', position_after='attention'):
         """
         Returns a function that potentially fuses the dropout and bias addition.
@@ -1841,7 +1841,7 @@ def forward(
         cross_attention_relative_position_bias=None,
         checkpoint_core_attention=False,
         alibi_position_bias=None,
-    ):
+    ):
         if self.dtype == torch.float32:
             return super().forward(
                 hidden_states,
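
In both hunks the removed and added rows render identically in plain text, which, together with this being an automated pre-commit.ci fix, suggests the changes are whitespace-only (most likely trailing-whitespace removal).

For background on the code the first hunk touches: per its docstring, `_get_bias_droput_add_func` (the transposed "droput" spelling is the identifier as it appears in the file) returns a function that potentially fuses the dropout and bias addition. This follows the Megatron-LM bias-dropout-add pattern. Below is a minimal sketch of that pattern, assuming the usual Megatron-LM formulation; the names and signatures are illustrative, not NeMo's exact code.

```python
import torch


def bias_dropout_add(x, bias, residual, prob, training):
    # Unfused reference: add the linear layer's bias, apply dropout,
    # then add the residual branch.
    out = torch.nn.functional.dropout(x + bias, p=prob, training=training)
    return residual + out


@torch.jit.script
def bias_dropout_add_fused_train(
    x: torch.Tensor, bias: torch.Tensor, residual: torch.Tensor, prob: float
) -> torch.Tensor:
    # TorchScript variant for training: scripting the three elementwise ops
    # lets PyTorch fuse them into fewer kernels.
    return torch.nn.functional.dropout(x + bias, p=prob, training=True) + residual
```

A getter like the one in the diff would typically choose between the fused and unfused variants depending on whether the model is training; the `transformer_block_type` and `position_after` arguments suggest the choice also depends on the pre-LN/post-LN layout and on whether the result feeds the attention or MLP sublayer.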
