From 8e216e5d9b23a81f3490d7d4c9ebbaac12880818 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 1 Jul 2024 13:29:27 -0300 Subject: [PATCH] fix: Add LanguageModel to field_typing module (#2410) * feat: Add LanguageModel to field_typing module * chore: Fix type annotations in model build methods --------- Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- src/backend/base/langflow/base/models/model.py | 8 ++++---- .../components/deactivated/ChatLiteLLMModel.py | 5 ++--- .../components/models/AmazonBedrockModel.py | 4 ++-- .../langflow/components/models/AnthropicModel.py | 4 ++-- .../langflow/components/models/AzureOpenAIModel.py | 4 ++-- .../base/langflow/components/models/CohereModel.py | 2 +- .../components/models/GoogleGenerativeAIModel.py | 14 +++----------- .../base/langflow/components/models/GroqModel.py | 4 ++-- .../langflow/components/models/HuggingFaceModel.py | 4 ++-- .../langflow/components/models/MistralModel.py | 4 ++-- .../base/langflow/components/models/OllamaModel.py | 4 ++-- .../base/langflow/components/models/OpenAIModel.py | 6 +++--- .../langflow/components/models/VertexAiModel.py | 4 ++-- src/backend/base/langflow/field_typing/__init__.py | 2 ++ 14 files changed, 31 insertions(+), 38 deletions(-) diff --git a/src/backend/base/langflow/base/models/model.py b/src/backend/base/langflow/base/models/model.py index bea9c88d8a03..0ea39fcb24cf 100644 --- a/src/backend/base/langflow/base/models/model.py +++ b/src/backend/base/langflow/base/models/model.py @@ -143,13 +143,13 @@ def get_chat_result( messages.append(HumanMessage(content=input_value)) inputs: Union[list, dict] = messages or {} try: - runnable = runnable.with_config( - {"run_name": self.display_name, "project_name": self._tracing_service.project_name} + runnable = runnable.with_config( # type: ignore + {"run_name": self.display_name, "project_name": self._tracing_service.project_name} # type: ignore ) if stream: - return 
runnable.stream(inputs) + return runnable.stream(inputs) # type: ignore else: - message = runnable.invoke(inputs) + message = runnable.invoke(inputs) # type: ignore result = message.content if hasattr(message, "content") else message if isinstance(message, AIMessage): status_message = self.build_status_message(message) diff --git a/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py b/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py index cd487c4a0815..10f7a1eb918d 100644 --- a/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py +++ b/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py @@ -127,7 +127,7 @@ class ChatLiteLLMModelComponent(LCModelComponent): Output(display_name="Language Model", name="model_output", method="build_model"), ] - def build_model(self) -> LanguageModel: + def build_model(self) -> LanguageModel: # type: ignore[type-var] try: import litellm # type: ignore @@ -176,5 +176,4 @@ def build_model(self) -> LanguageModel: openrouter_api_key=api_keys["openrouter_api_key"], ) - return output - return output + return output # type: ignore diff --git a/src/backend/base/langflow/components/models/AmazonBedrockModel.py b/src/backend/base/langflow/components/models/AmazonBedrockModel.py index ba06c6723e1b..21c778a891bb 100644 --- a/src/backend/base/langflow/components/models/AmazonBedrockModel.py +++ b/src/backend/base/langflow/components/models/AmazonBedrockModel.py @@ -69,7 +69,7 @@ class AmazonBedrockComponent(LCModelComponent): Output(display_name="Language Model", name="model_output", method="build_model"), ] - def build_model(self) -> LanguageModel: + def build_model(self) -> LanguageModel: # type: ignore[type-var] model_id = self.model_id credentials_profile_name = self.credentials_profile_name region_name = self.region_name @@ -89,4 +89,4 @@ def build_model(self) -> LanguageModel: ) except Exception as e: raise ValueError("Could not connect to AmazonBedrock API.") from e - 
return output + return output # type: ignore diff --git a/src/backend/base/langflow/components/models/AnthropicModel.py b/src/backend/base/langflow/components/models/AnthropicModel.py index 4a750b645608..39a78c3840f4 100644 --- a/src/backend/base/langflow/components/models/AnthropicModel.py +++ b/src/backend/base/langflow/components/models/AnthropicModel.py @@ -64,7 +64,7 @@ class AnthropicModelComponent(LCModelComponent): Output(display_name="Language Model", name="model_output", method="build_model"), ] - def build_model(self) -> LanguageModel: + def build_model(self) -> LanguageModel: # type: ignore[type-var] model = self.model anthropic_api_key = self.anthropic_api_key max_tokens = self.max_tokens @@ -83,7 +83,7 @@ def build_model(self) -> LanguageModel: except Exception as e: raise ValueError("Could not connect to Anthropic API.") from e - return output + return output # type: ignore def _get_exception_message(self, exception: Exception) -> str | None: """ diff --git a/src/backend/base/langflow/components/models/AzureOpenAIModel.py b/src/backend/base/langflow/components/models/AzureOpenAIModel.py index 9a2ebbbe029b..23528ed29a49 100644 --- a/src/backend/base/langflow/components/models/AzureOpenAIModel.py +++ b/src/backend/base/langflow/components/models/AzureOpenAIModel.py @@ -78,7 +78,7 @@ class AzureChatOpenAIComponent(LCModelComponent): Output(display_name="Language Model", name="model_output", method="model_response"), ] - def model_response(self) -> LanguageModel: + def model_response(self) -> LanguageModel: # type: ignore[type-var] model = self.model azure_endpoint = self.azure_endpoint azure_deployment = self.azure_deployment @@ -107,4 +107,4 @@ def model_response(self) -> LanguageModel: except Exception as e: raise ValueError("Could not connect to AzureOpenAI API.") from e - return output + return output # type: ignore diff --git a/src/backend/base/langflow/components/models/CohereModel.py b/src/backend/base/langflow/components/models/CohereModel.py 
index 305f8b4c20d1..101b72397c77 100644 --- a/src/backend/base/langflow/components/models/CohereModel.py +++ b/src/backend/base/langflow/components/models/CohereModel.py @@ -51,4 +51,4 @@ def build_model(self) -> LanguageModel | BaseChatModel: cohere_api_key=api_key, ) - return output + return output # type: ignore diff --git a/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py b/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py index d4e75e54b57f..7bcaf0b0760d 100644 --- a/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py +++ b/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py @@ -3,15 +3,7 @@ from langflow.base.constants import STREAM_INFO_TEXT from langflow.base.models.model import LCModelComponent from langflow.field_typing import LanguageModel -from langflow.inputs import ( - BoolInput, - DropdownInput, - FloatInput, - IntInput, - MessageInput, - SecretStrInput, - StrInput, -) +from langflow.inputs import BoolInput, DropdownInput, FloatInput, IntInput, MessageInput, SecretStrInput, StrInput class GoogleGenerativeAIComponent(LCModelComponent): @@ -66,7 +58,7 @@ class GoogleGenerativeAIComponent(LCModelComponent): ), ] - def build_model(self) -> LanguageModel: + def build_model(self) -> LanguageModel: # type: ignore[type-var] try: from langchain_google_genai import ChatGoogleGenerativeAI except ImportError: @@ -90,4 +82,4 @@ def build_model(self) -> LanguageModel: google_api_key=SecretStr(google_api_key), ) - return output + return output # type: ignore diff --git a/src/backend/base/langflow/components/models/GroqModel.py b/src/backend/base/langflow/components/models/GroqModel.py index a4784354f4e5..fe1f4f4b1505 100644 --- a/src/backend/base/langflow/components/models/GroqModel.py +++ b/src/backend/base/langflow/components/models/GroqModel.py @@ -68,7 +68,7 @@ class GroqModel(LCModelComponent): ), ] - def build_model(self) -> LanguageModel: + def build_model(self) -> 
LanguageModel: # type: ignore[type-var] groq_api_key = self.groq_api_key model_name = self.model_name max_tokens = self.max_tokens @@ -87,4 +87,4 @@ def build_model(self) -> LanguageModel: streaming=stream, ) - return output + return output # type: ignore diff --git a/src/backend/base/langflow/components/models/HuggingFaceModel.py b/src/backend/base/langflow/components/models/HuggingFaceModel.py index 021f15793fef..28c341114b66 100644 --- a/src/backend/base/langflow/components/models/HuggingFaceModel.py +++ b/src/backend/base/langflow/components/models/HuggingFaceModel.py @@ -36,7 +36,7 @@ class HuggingFaceEndpointsComponent(LCModelComponent): Output(display_name="Language Model", name="model_output", method="build_model"), ] - def build_model(self) -> LanguageModel: + def build_model(self) -> LanguageModel: # type: ignore[type-var] endpoint_url = self.endpoint_url task = self.task huggingfacehub_api_token = self.huggingfacehub_api_token @@ -53,4 +53,4 @@ def build_model(self) -> LanguageModel: raise ValueError("Could not connect to HuggingFace Endpoints API.") from e output = ChatHuggingFace(llm=llm) - return output + return output # type: ignore diff --git a/src/backend/base/langflow/components/models/MistralModel.py b/src/backend/base/langflow/components/models/MistralModel.py index cf20de5a0d65..1604ecb5bb0f 100644 --- a/src/backend/base/langflow/components/models/MistralModel.py +++ b/src/backend/base/langflow/components/models/MistralModel.py @@ -70,7 +70,7 @@ class MistralAIModelComponent(LCModelComponent): Output(display_name="Language Model", name="model_output", method="build_model"), ] - def build_model(self) -> LanguageModel: + def build_model(self) -> LanguageModel: # type: ignore[type-var] mistral_api_key = self.mistral_api_key temperature = self.temperature model_name = self.model_name @@ -102,4 +102,4 @@ def build_model(self) -> LanguageModel: safe_mode=safe_mode, ) - return output + return output # type: ignore diff --git 
a/src/backend/base/langflow/components/models/OllamaModel.py b/src/backend/base/langflow/components/models/OllamaModel.py index 71a62cd68d3e..1c0e5fd2ca24 100644 --- a/src/backend/base/langflow/components/models/OllamaModel.py +++ b/src/backend/base/langflow/components/models/OllamaModel.py @@ -223,7 +223,7 @@ def get_model(self, url: str) -> list[str]: Output(display_name="Language Model", name="model_output", method="build_model"), ] - def build_model(self) -> LanguageModel: + def build_model(self) -> LanguageModel: # type: ignore[type-var] # Mapping mirostat settings to their corresponding values mirostat_options = {"Mirostat": 1, "Mirostat 2.0": 2} @@ -272,4 +272,4 @@ def build_model(self) -> LanguageModel: except Exception as e: raise ValueError("Could not initialize Ollama LLM.") from e - return output + return output # type: ignore diff --git a/src/backend/base/langflow/components/models/OpenAIModel.py b/src/backend/base/langflow/components/models/OpenAIModel.py index 480205fdb405..0cdc8fdf9e21 100644 --- a/src/backend/base/langflow/components/models/OpenAIModel.py +++ b/src/backend/base/langflow/components/models/OpenAIModel.py @@ -80,8 +80,8 @@ class OpenAIModelComponent(LCModelComponent): ), ] - def build_model(self) -> LanguageModel: - # self.output_schea is a list of dictionaries + def build_model(self) -> LanguageModel: # type: ignore[type-var] + # self.output_schea is a list of dictionaries + # let's convert it to a dictionary output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {}) openai_api_key = self.openai_api_key @@ -112,7 +112,7 @@ def build_model(self) -> LanguageModel: else: output = output.bind(response_format={"type": "json_object"}) # type: ignore - return output + return output # type: ignore def _get_exception_message(self, e: Exception): """ diff --git a/src/backend/base/langflow/components/models/VertexAiModel.py b/src/backend/base/langflow/components/models/VertexAiModel.py index
fb06ac5f2042..0415a7971620 100644 --- a/src/backend/base/langflow/components/models/VertexAiModel.py +++ b/src/backend/base/langflow/components/models/VertexAiModel.py @@ -52,7 +52,7 @@ class ChatVertexAIComponent(LCModelComponent): Output(display_name="Language Model", name="model_output", method="build_model"), ] - def build_model(self) -> LanguageModel: + def build_model(self) -> LanguageModel: # type: ignore[type-var] credentials = self.credentials location = self.location max_output_tokens = self.max_output_tokens @@ -75,4 +75,4 @@ def build_model(self) -> LanguageModel: verbose=verbose, ) - return output + return output # type: ignore diff --git a/src/backend/base/langflow/field_typing/__init__.py b/src/backend/base/langflow/field_typing/__init__.py index bd25f6cf4b7a..e387c4c8ddd5 100644 --- a/src/backend/base/langflow/field_typing/__init__.py +++ b/src/backend/base/langflow/field_typing/__init__.py @@ -26,6 +26,7 @@ TextSplitter, Tool, VectorStore, + LanguageModel, ) from .range_spec import RangeSpec @@ -84,4 +85,5 @@ def __getattr__(name: str) -> Any: "BaseChatModel", "Retriever", "Text", + "LanguageModel", ]