diff --git a/src/transformers/trainer_seq2seq.py b/src/transformers/trainer_seq2seq.py
index aaff31a2dc9e..13d407bec4a1 100644
--- a/src/transformers/trainer_seq2seq.py
+++ b/src/transformers/trainer_seq2seq.py
@@ -288,7 +288,9 @@ def prediction_step(
             and "decoder_input_ids" in generation_inputs
             and generation_inputs["labels"].shape == generation_inputs["decoder_input_ids"].shape
         ):
-            generation_inputs = {k: v for k, v in inputs.items() if k != "decoder_input_ids"}
+            generation_inputs = {
+                k: v for k, v in inputs.items() if k not in ("decoder_input_ids", "decoder_attention_mask")
+            }
         generated_tokens = self.model.generate(**generation_inputs, **gen_kwargs)
 
         # Temporary hack to ensure the generation config is not initialized for each iteration of the evaluation loop