
Commit

Fix activation checkpoint (NVIDIA#7334)
* Fix activation checkpoint

Signed-off-by: Cheng-Ping Hsieh <[email protected]>

* Remove inference_mode checkpoint

Signed-off-by: Cheng-Ping Hsieh <[email protected]>

---------

Signed-off-by: Cheng-Ping Hsieh <[email protected]>
hsiehjackson committed Aug 29, 2023
1 parent 62c5f11 commit 22e61ca
Showing 1 changed file with 2 additions and 4 deletions.
nemo/collections/nlp/modules/common/megatron/attention.py (2 additions, 4 deletions)
@@ -245,24 +245,22 @@ def _checkpointed_attention_forward(
         """Forward method with activation checkpointing."""

         def custom_forward(*inputs):
-            if len(inputs) == 8:
+            if len(inputs) == 7:
                 query_layer = inputs[0]
                 key_layer = inputs[1]
                 value_layer = inputs[2]
                 attention_mask = inputs[3]
                 rotary_pos_emb = inputs[4]
                 relative_position_bias = inputs[5]
                 headscale_tensor = inputs[6]
-                inference_mode = inputs[7]
-            elif len(inputs) == 9:
+            elif len(inputs) == 8:
                 query_layer = inputs[0]
                 key_layer = inputs[1]
                 value_layer = inputs[2]
                 attention_mask = inputs[3]
                 rotary_pos_emb = (inputs[4], inputs[5])
                 relative_position_bias = inputs[6]
                 headscale_tensor = inputs[7]
-                inference_mode = inputs[8]
             else:
                 raise ValueError('unexpected number of inputs')
             output_ = self.core_attention(
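For context, the pattern being patched packs the attention inputs into a flat argument list for an activation-checkpointed forward, and custom_forward recovers them by counting positional arguments; once inference_mode is no longer forwarded, the expected lengths drop from 8/9 to 7/8. Below is a minimal, self-contained sketch of that pattern using torch.utils.checkpoint directly rather than NeMo's own checkpointing wrapper; the function and variable names (core_attention, checkpointed_attention_forward, and so on) are illustrative stand-ins, not the NeMo API.

# A minimal sketch (not NeMo code) of the checkpointed-attention pattern the diff
# adjusts. It uses torch.utils.checkpoint directly, whereas NeMo/Megatron has its
# own checkpointing utilities; all names below are illustrative stand-ins.
import torch
from torch.utils.checkpoint import checkpoint


def core_attention(query, key, value, attention_mask,
                   rotary_pos_emb=None, relative_position_bias=None,
                   headscale_tensor=None):
    # Stand-in for the real attention kernel. rotary_pos_emb and headscale_tensor
    # are accepted but not applied here, purely to mirror the argument list.
    scores = torch.matmul(query, key.transpose(-1, -2))
    if relative_position_bias is not None:
        scores = scores + relative_position_bias
    scores = scores.masked_fill(attention_mask, float('-inf'))
    probs = torch.softmax(scores, dim=-1)
    return torch.matmul(probs, value)


def checkpointed_attention_forward(query, key, value, attention_mask,
                                   rotary_pos_emb=None,
                                   relative_position_bias=None,
                                   headscale_tensor=None):
    def custom_forward(*inputs):
        # Inputs arrive as a flat tuple; the arity check must match exactly the
        # number of arguments passed to checkpoint() below (7 here, after
        # dropping inference_mode as in the patched code).
        if len(inputs) == 7:
            q, k, v, mask, rope, rel_bias, headscale = inputs
        else:
            raise ValueError('unexpected number of inputs')
        return core_attention(q, k, v, mask, rope, rel_bias, headscale)

    # Activation checkpointing: intermediate activations inside custom_forward
    # are discarded during the forward pass and recomputed during backward.
    return checkpoint(
        custom_forward, query, key, value, attention_mask,
        rotary_pos_emb, relative_position_bias, headscale_tensor,
        use_reentrant=False,
    )


if __name__ == '__main__':
    q = torch.randn(2, 4, 8, requires_grad=True)
    k = torch.randn(2, 4, 8)
    v = torch.randn(2, 4, 8)
    mask = torch.zeros(2, 4, 4, dtype=torch.bool)  # nothing masked
    out = checkpointed_attention_forward(q, k, v, mask)
    out.sum().backward()
    print(out.shape, q.grad.shape)

The reason the real code branches on len(inputs) at all is that rotary_pos_emb may arrive either as a single tensor or as a pair of tensors, which changes the flattened argument count (7 vs. 8 after this fix).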
