From 3b26237a18864426743f804de8c464408975e798 Mon Sep 17 00:00:00 2001
From: Guy Rosin
Date: Fri, 15 Jan 2021 05:07:35 +0200
Subject: [PATCH] Add missing kwargs to Pipeline's tokenizer() call

---
 src/transformers/pipelines/base.py | 1 +
 tests/test_pipelines_fill_mask.py  | 5 ++++-
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/src/transformers/pipelines/base.py b/src/transformers/pipelines/base.py
index 124f2e290ebc..2139980da236 100644
--- a/src/transformers/pipelines/base.py
+++ b/src/transformers/pipelines/base.py
@@ -590,6 +590,7 @@ def _parse_and_tokenize(
             return_tensors=self.framework,
             padding=padding,
             truncation=truncation,
+            **kwargs,
         )
 
         return inputs

diff --git a/tests/test_pipelines_fill_mask.py b/tests/test_pipelines_fill_mask.py
index f087ed213577..fec273f484b3 100644
--- a/tests/test_pipelines_fill_mask.py
+++ b/tests/test_pipelines_fill_mask.py
@@ -77,9 +77,12 @@ def test_torch_fill_mask(self):
         self.assertIsInstance(outputs, list)
 
         # This used to fail with `cannot mix args and kwargs`
-        outputs = nlp(valid_inputs, something=False)
+        outputs = nlp(valid_inputs, verbose=False)
         self.assertIsInstance(outputs, list)
 
+        # This fails because `invalid_arg` is not an argument of the pipeline's tokenizer
+        self.assertRaises(TypeError, nlp, valid_inputs, invalid_arg=False)
+
     @require_torch
     def test_torch_fill_mask_with_targets(self):
         valid_inputs = ["My name is <mask>"]
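
With `**kwargs` forwarded in `_parse_and_tokenize`, keyword arguments passed to a pipeline call now reach the underlying tokenizer. A minimal usage sketch of the behavior the updated test exercises; the model name below is illustrative and not taken from this patch:

    from transformers import pipeline

    # "distilroberta-base" is an illustrative choice of fill-mask model
    nlp = pipeline("fill-mask", model="distilroberta-base")

    # `verbose` is a tokenizer __call__ argument, so it is forwarded and accepted
    outputs = nlp("My name is <mask>", verbose=False)

    # An unrecognized kwarg is also forwarded and now raises TypeError
    # nlp("My name is <mask>", invalid_arg=False)

This mirrors the test change: a recognized tokenizer argument passes through to the tokenizer, while an unrecognized one fails instead of being silently ignored.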