Commit 41b9107

fix
zhangbo9674 committed Feb 24, 2025
1 parent 428bd09 commit 41b9107
Showing 1 changed file with 2 additions and 10 deletions.
12 changes: 2 additions & 10 deletions paddlenlp/transformers/deepseek_v2/modeling.py
@@ -231,11 +231,8 @@ def scaled_dot_product_attention(
             )
 
         attn_weights = attn_weights + attention_mask
-        if not paddle.in_dynamic_mode():
+        with paddle.amp.auto_cast(False):
             attn_weights = F.softmax(attn_weights, axis=-1, dtype="float32").astype(query_states.dtype)
-        else:
-            with paddle.amp.auto_cast(False):
-                attn_weights = F.softmax(attn_weights, axis=-1, dtype="float32").astype(query_states.dtype)
 
         attn_weights = F.dropout(attn_weights, p=config.attention_dropout, training=training)
 

(Codecov / codecov/patch: added line paddlenlp/transformers/deepseek_v2/modeling.py#L234 was not covered by tests.)
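Both deleted branches ran the same float32 softmax, so the dynamic/static-mode split was redundant; the single auto_cast(False) block now covers every mode. For context, a minimal stand-alone sketch of the surviving pattern (the tensor shape and the bfloat16 compute dtype are illustrative assumptions, not values from this file):

    import paddle
    import paddle.nn.functional as F

    # Illustrative attention logits: [batch, num_heads, q_len, kv_len] in bfloat16.
    attn_weights = paddle.randn([1, 8, 16, 16]).astype("bfloat16")

    # Disable AMP so the softmax reduction actually runs in float32 even
    # inside an auto_cast region, then cast back to the compute dtype.
    with paddle.amp.auto_cast(False):
        attn_weights = F.softmax(attn_weights, axis=-1, dtype="float32").astype("bfloat16")

Running the softmax normalizer in float32 guards against overflow and rounding loss when the logits themselves are kept in half precision.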

@@ -335,12 +332,7 @@ def forward(self, hidden_states):
                 f"Implementation of fused_rms_norm is not available on {get_env_device()}. Please install paddle_xpu to use this feature"
             )
 
-        if paddle.in_dynamic_mode():
-            with paddle.amp.auto_cast(False):
-                hidden_states = hidden_states.astype("float32")
-                variance = hidden_states.pow(2).mean(-1, keepdim=True)
-                hidden_states = paddle.rsqrt(variance + self.variance_epsilon) * hidden_states
-        else:
+        with paddle.amp.auto_cast(False):
             hidden_states = hidden_states.astype("float32")
             variance = hidden_states.pow(2).mean(-1, keepdim=True)
             hidden_states = paddle.rsqrt(variance + self.variance_epsilon) * hidden_states

(Codecov / codecov/patch: added line paddlenlp/transformers/deepseek_v2/modeling.py#L335 was not covered by tests.)
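The RMSNorm change is the same consolidation: both branches performed the identical float32 normalization, and only the unconditional auto_cast(False) block remains. A stand-alone sketch of the resulting forward pass (the class name and the trailing scale-by-weight step are assumptions for illustration; this hunk only shows the normalization core):

    import paddle
    import paddle.nn as nn

    class RMSNormSketch(nn.Layer):
        """Illustrative RMSNorm layer, not the class defined in this file."""

        def __init__(self, hidden_size, variance_epsilon=1e-6):
            super().__init__()
            self.weight = self.create_parameter(
                shape=[hidden_size],
                default_initializer=nn.initializer.Constant(1.0),
            )
            self.variance_epsilon = variance_epsilon

        def forward(self, hidden_states):
            input_dtype = hidden_states.dtype
            # Core pattern from the diff: always run the variance reduction
            # in float32 with AMP disabled, regardless of graph mode.
            with paddle.amp.auto_cast(False):
                hidden_states = hidden_states.astype("float32")
                variance = hidden_states.pow(2).mean(-1, keepdim=True)
                hidden_states = paddle.rsqrt(variance + self.variance_epsilon) * hidden_states
            # Scale by the learned weight and restore the input dtype
            # (a common RMSNorm convention, assumed here).
            return self.weight * hidden_states.astype(input_dtype)

Upcasting before the mean-of-squares keeps the reduction from losing precision when activations are float16 or bfloat16.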
