From c6217bfe018ed88113f5c3d95855fe7ca3fa4699 Mon Sep 17 00:00:00 2001
From: Gaurav Jain
Date: Sat, 21 Dec 2024 13:28:56 -0800
Subject: [PATCH] Swap gated `meta-llama/llama-3.2` with `allenai/llama`

---
 tests/generation/test_candidate_generator.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/generation/test_candidate_generator.py b/tests/generation/test_candidate_generator.py
index 8f3e024ae358..2fcda92a9e1b 100644
--- a/tests/generation/test_candidate_generator.py
+++ b/tests/generation/test_candidate_generator.py
@@ -268,7 +268,7 @@ def setUpClass(cls):
         cls.assistant_model = AutoModelForCausalLM.from_pretrained("hf-internal-testing/tiny-random-gpt2").to(
             cls.device
         )
-        cls.main_tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-3.2-1B-Instruct")
+        cls.main_tokenizer = AutoTokenizer.from_pretrained("allenai/Llama-3.1-Tulu-3-8B-SFT")
         cls.assistant_tokenizer = AutoTokenizer.from_pretrained("hf-internal-testing/tiny-random-gpt2")
         cls.generation_config = GenerationConfig()
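
Context for reviewers: `meta-llama/Llama-3.2-1B-Instruct` is a gated Hugging Face Hub repository, so loading its tokenizer fails in environments without an access token that has been granted permission, while the `allenai/Llama-3.1-Tulu-3-8B-SFT` tokenizer is ungated and loads anonymously. A minimal sketch of the difference the test setup hits, assuming no Hugging Face credentials are configured (the try/except shape is illustrative; `from_pretrained` surfaces gated-access failures as an `OSError`):

    from transformers import AutoTokenizer

    # Ungated checkpoint: downloads and loads without any authentication.
    tok = AutoTokenizer.from_pretrained("allenai/Llama-3.1-Tulu-3-8B-SFT")

    # Gated checkpoint: without granted access, from_pretrained raises an
    # OSError explaining that the repo is gated and credentials are required.
    try:
        AutoTokenizer.from_pretrained("meta-llama/Llama-3.2-1B-Instruct")
    except OSError as err:
        print(f"gated checkpoint not accessible without credentials: {err}")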