diff --git a/src/transformers/modeling_outputs.py b/src/transformers/modeling_outputs.py
index f05a6229df9d..26a41fc9d1a5 100644
--- a/src/transformers/modeling_outputs.py
+++ b/src/transformers/modeling_outputs.py
@@ -260,7 +260,7 @@ class MaskedLMOutput(ModelOutput):
 
     Args:
         loss (:obj:`torch.FloatTensor` of shape :obj:`(1,)`, `optional`, returned when :obj:`labels` is provided):
-            Masked languaged modeling (MLM) loss.
+            Masked language modeling (MLM) loss.
         logits (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, config.vocab_size)`):
             Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax).
         hidden_states (:obj:`tuple(torch.FloatTensor)`, `optional`, returned when ``output_hidden_states=True`` is passed or when ``config.output_hidden_states=True``):
@@ -289,7 +289,7 @@ class Seq2SeqLMOutput(ModelOutput):
 
     Args:
         loss (:obj:`torch.FloatTensor` of shape :obj:`(1,)`, `optional`, returned when :obj:`labels` is provided):
-            Languaged modeling loss.
+            Language modeling loss.
         logits (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, config.vocab_size)`):
             Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax).
         past_key_values (:obj:`List[torch.FloatTensor]`, `optional`, returned when ``use_cache=True`` is passed or when ``config.use_cache=True``):
diff --git a/src/transformers/modeling_prophetnet.py b/src/transformers/modeling_prophetnet.py
index 57e4e4c6a3b6..417111c409bd 100644
--- a/src/transformers/modeling_prophetnet.py
+++ b/src/transformers/modeling_prophetnet.py
@@ -225,7 +225,7 @@ class ProphetNetSeq2SeqLMOutput(ModelOutput):
 
     Args:
         loss (:obj:`torch.FloatTensor` of shape :obj:`(1,)`, `optional`, returned when :obj:`labels` is provided):
-            Languaged modeling loss.
+            Language modeling loss.
         logits (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, decoder_sequence_length, config.vocab_size)`):
             Prediction scores of the main stream language modeling head (scores for each vocabulary token before
             SoftMax).
@@ -438,7 +438,7 @@ class ProphetNetDecoderLMOutput(ModelOutput):
 
     Args:
         loss (:obj:`torch.FloatTensor` of shape :obj:`(1,)`, `optional`, returned when :obj:`labels` is provided):
-            Languaged modeling loss.
+            Language modeling loss.
         logits (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, decoder_sequence_length, config.vocab_size)`):
             Prediction scores of the main stream language modeling head (scores for each vocabulary token before
             SoftMax).
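
Context, not part of the patch: a minimal sketch of how the fields documented above reach a caller. It assumes BertForMaskedLM (any masked-LM head returns a MaskedLMOutput when return_dict=True); the checkpoint name is illustrative.

    from transformers import BertTokenizer, BertForMaskedLM

    tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
    model = BertForMaskedLM.from_pretrained('bert-base-uncased', return_dict=True)

    inputs = tokenizer("The capital of France is [MASK].", return_tensors='pt')
    # Passing the input ids as labels just to trigger the loss computation.
    outputs = model(**inputs, labels=inputs['input_ids'])

    print(outputs.loss)          # the masked language modeling (MLM) loss
    print(outputs.logits.shape)  # (batch_size, sequence_length, config.vocab_size)
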
diff --git a/src/transformers/modeling_t5.py b/src/transformers/modeling_t5.py
index f71acf1492f4..8910be332144 100644
--- a/src/transformers/modeling_t5.py
+++ b/src/transformers/modeling_t5.py
@@ -1124,7 +1124,7 @@ def forward(
             >>> model = T5ForConditionalGeneration.from_pretrained('t5-small', return_dict=True)
 
             >>> input_ids = tokenizer('The <extra_id_0> walks in <extra_id_1> park', return_tensors='pt').input_ids
-            labels = tokenizer('<extra_id_0> cute dog <extra_id_1> the <extra_id_2>', return_tensors='pt').input_ids
+            >>> labels = tokenizer('<extra_id_0> cute dog <extra_id_1> the <extra_id_2>', return_tensors='pt').input_ids
             >>> outputs = model(input_ids=input_ids, labels=labels)
             >>> loss = outputs.loss
             >>> logits = outputs.logits
diff --git a/src/transformers/modeling_tf_outputs.py b/src/transformers/modeling_tf_outputs.py
index ac1fd8f5fb50..9a4deb65b0d5 100644
--- a/src/transformers/modeling_tf_outputs.py
+++ b/src/transformers/modeling_tf_outputs.py
@@ -227,7 +227,7 @@ class TFMaskedLMOutput(ModelOutput):
 
     Args:
         loss (:obj:`tf.Tensor` of shape :obj:`(1,)`, `optional`, returned when :obj:`labels` is provided):
-            Masked languaged modeling (MLM) loss.
+            Masked language modeling (MLM) loss.
         logits (:obj:`tf.Tensor` of shape :obj:`(batch_size, sequence_length, config.vocab_size)`):
             Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax).
         hidden_states (:obj:`tuple(tf.Tensor)`, `optional`, returned when ``output_hidden_states=True`` is passed or when ``config.output_hidden_states=True``):
@@ -256,7 +256,7 @@ class TFSeq2SeqLMOutput(ModelOutput):
 
     Args:
         loss (:obj:`tf.Tensor` of shape :obj:`(1,)`, `optional`, returned when :obj:`labels` is provided):
-            Languaged modeling loss.
+            Language modeling loss.
         logits (:obj:`tf.Tensor` of shape :obj:`(batch_size, sequence_length, config.vocab_size)`):
             Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax).
         past_key_values (:obj:`List[tf.Tensor]`, `optional`, returned when ``use_cache=True`` is passed or when ``config.use_cache=True``):
diff --git a/src/transformers/modeling_tf_t5.py b/src/transformers/modeling_tf_t5.py
index 827af120f86c..e09f5c733164 100644
--- a/src/transformers/modeling_tf_t5.py
+++ b/src/transformers/modeling_tf_t5.py
@@ -1213,7 +1213,7 @@ def call(
             >>> model = TFT5ForConditionalGeneration.from_pretrained('t5-small')
 
             >>> inputs = tokenizer('The <extra_id_0> walks in <extra_id_1> park', return_tensors='tf').input_ids
-            labels = tokenizer('<extra_id_0> cute dog <extra_id_1> the <extra_id_2>', return_tensors='tf').input_ids
+            >>> labels = tokenizer('<extra_id_0> cute dog <extra_id_1> the <extra_id_2>', return_tensors='tf').input_ids
             >>> outputs = model(inputs, labels=labels)
             >>> loss = outputs.loss
             >>> logits = outputs.logits
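
The missing `>>> ` prompts matter because doctest only executes lines that begin with the prompt: a bare `labels = ...` line inside an example is parsed as the expected output of the preceding statement, so the example fails if the docstring is ever doctested, and `labels` is silently skipped by anyone pasting the prompted lines. A runnable sketch of the corrected PyTorch example (sentinel-token strings as in the docstring above):

    from transformers import T5Tokenizer, T5ForConditionalGeneration

    tokenizer = T5Tokenizer.from_pretrained('t5-small')
    model = T5ForConditionalGeneration.from_pretrained('t5-small', return_dict=True)

    # <extra_id_N> are T5's sentinel tokens; the labels supply the text
    # that fills each masked span in the input.
    input_ids = tokenizer('The <extra_id_0> walks in <extra_id_1> park', return_tensors='pt').input_ids
    labels = tokenizer('<extra_id_0> cute dog <extra_id_1> the <extra_id_2>', return_tensors='pt').input_ids

    # Decoder inputs are derived from `labels` internally.
    outputs = model(input_ids=input_ids, labels=labels)
    print(outputs.loss, outputs.logits.shape)
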