From db24a642fd5aba4ecd1e961ecf0b8e0af1df7d9a Mon Sep 17 00:00:00 2001
From: berrytern
Date: Thu, 27 Jun 2024 13:38:31 -0300
Subject: [PATCH] chore: Enhance the report on missing parameters for the
 Azure provider on ChatLiteLLMModelComponent.

---
 .../components/deactivated/ChatLiteLLMModel.py | 15 +++------------
 1 file changed, 3 insertions(+), 12 deletions(-)

diff --git a/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py b/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py
index 4d0c718c1261..8711f6d2ffbd 100644
--- a/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py
+++ b/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from langchain_community.chat_models.litellm import ChatLiteLLM, ChatLiteLLMException
 
 from langflow.base.constants import STREAM_INFO_TEXT
@@ -123,7 +121,6 @@ class ChatLiteLLMModelComponent(LCModelComponent):
     def build_model(self) -> LanguageModel:
         try:
             import litellm
-            import os
 
             litellm.drop_params = True
             litellm.set_verbose = self.verbose
@@ -132,20 +129,14 @@ def build_model(self) -> LanguageModel:
                 "Could not import litellm python package. "
                 "Please install it with `pip install litellm`"
             )
-        provider_map = {
-            "OpenAI": "openai_api_key",
-            "Azure": "azure_api_key",
-            "Anthropic": "anthropic_api_key",
-            "Replicate": "replicate_api_key",
-            "Cohere": "cohere_api_key",
-            "OpenRouter": "openrouter_api_key",
-        }
         # Set the API key based on the provider
         self.kwargs[self.provider] = self.api_key
         self.model_kwargs["api_key"] = self.api_key
         if self.provider == "Azure":
             if "api_base" not in self.kwargs:
                 raise Exception("Missing api_base on kwargs")
+            if "api_version" not in self.model_kwargs:
+                raise Exception("Missing api_version on model_kwargs")
         llm = ChatLiteLLM(
             model=f"{self.provider.lower()}/{self.model}",
             client=None,
@@ -157,7 +148,7 @@ def build_model(self) -> LanguageModel:
             n=self.n,
             max_tokens=self.max_tokens,
             max_retries=self.max_retries,
-            **self.kwargs
+            **self.kwargs,
         )
         return llm
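
A minimal sketch of the behaviour this patch adds, assuming the component can be
configured programmatically and imported from the module path shown in the diff,
and that litellm and the component's other required fields are already set up; the
import path, placeholder key, and attribute assignments below are illustrative, not
taken from the Langflow documentation.

    # Hypothetical usage sketch: with provider "Azure" and no "api_base" /
    # "api_version" supplied, build_model() now fails fast with an explicit
    # message instead of passing incomplete settings to ChatLiteLLM.
    from langflow.components.deactivated.ChatLiteLLMModel import (  # path assumed from the diff
        ChatLiteLLMModelComponent,
    )

    component = ChatLiteLLMModelComponent()
    component.provider = "Azure"
    component.api_key = "<azure-api-key>"  # placeholder value
    component.kwargs = {}                  # "api_base" not provided
    component.model_kwargs = {}            # "api_version" not provided

    try:
        component.build_model()
    except Exception as exc:
        # Prints "Missing api_base on kwargs"; with api_base set but no
        # api_version, it prints "Missing api_version on model_kwargs".
        print(exc)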