diff --git a/examples/run_lm_finetuning.py b/examples/run_lm_finetuning.py
index 52a1b75a6542..0e6c3774aa23 100644
--- a/examples/run_lm_finetuning.py
+++ b/examples/run_lm_finetuning.py
@@ -215,6 +215,7 @@ def train(args, train_dataset, model, tokenizer):
     global_step = 0
     tr_loss, logging_loss = 0.0, 0.0
+    model.resize_token_embeddings(len(tokenizer))
     model.zero_grad()
     train_iterator = trange(int(args.num_train_epochs), desc="Epoch", disable=args.local_rank not in [-1, 0])
     set_seed(args)  # Added here for reproducibility (even between python 2 and 3)
     for _ in train_iterator:
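The added call ensures the model's token embedding matrix matches the tokenizer's vocabulary size before the first forward pass in `train()`. Below is a minimal sketch of the failure mode this guards against; the GPT-2 model/tokenizer pair and the `<new_token>` string are illustrative assumptions, not taken from the diff:

```python
from transformers import GPT2LMHeadModel, GPT2Tokenizer

# Illustrative example (model name and added token are assumptions):
# adding tokens grows the tokenizer's vocabulary, so the model's embedding
# matrix must be resized to match before training, otherwise the new token
# ids index past the end of the embedding table.
tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
model = GPT2LMHeadModel.from_pretrained("gpt2")

num_added = tokenizer.add_tokens(["<new_token>"])   # vocabulary grows by num_added
model.resize_token_embeddings(len(tokenizer))       # embedding rows now equal len(tokenizer)
```

Placing the resize inside `train()` rather than relying on the caller means the invariant holds regardless of whether tokens were added during setup.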