From 6fb30bc0c955c624410df8c01947e971409ddce2 Mon Sep 17 00:00:00 2001
From: Alexey Volkov
Date: Fri, 15 Sep 2023 04:07:29 -0700
Subject: [PATCH] chore: LLM - Fixed the `test_code_chat_model_send_message_streaming` test

PiperOrigin-RevId: 565637309
---
 tests/system/aiplatform/test_language_models.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/system/aiplatform/test_language_models.py b/tests/system/aiplatform/test_language_models.py
index 7740fe0fde..ad3a782228 100644
--- a/tests/system/aiplatform/test_language_models.py
+++ b/tests/system/aiplatform/test_language_models.py
@@ -306,7 +306,7 @@ def test_code_generation_streaming(self):
     def test_code_chat_model_send_message_streaming(self):
         aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION)
 
-        chat_model = language_models.ChatModel.from_pretrained("codechat-bison@001")
+        chat_model = language_models.CodeChatModel.from_pretrained("codechat-bison@001")
         chat = chat_model.start_chat()
 
         message1 = "Please help write a function to calculate the max of two numbers"
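
For reference, below is a minimal sketch of the call path the corrected test exercises, assuming the vertexai SDK's public language_models module; the project and location values are placeholders (the system test itself uses e2e_base._PROJECT and e2e_base._LOCATION):

    import vertexai
    from vertexai.language_models import CodeChatModel

    # Placeholder project/location values.
    vertexai.init(project="my-project", location="us-central1")

    # CodeChatModel (not ChatModel) is the class that serves codechat-bison@001,
    # which is why the test previously failed.
    chat_model = CodeChatModel.from_pretrained("codechat-bison@001")
    chat = chat_model.start_chat()

    # send_message_streaming yields partial responses as they arrive.
    for response in chat.send_message_streaming(
        "Please help write a function to calculate the max of two numbers"
    ):
        print(response.text)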