Skip to content

Commit

Permalink
Merge branch 'dev' into fix_component_parsing
Browse files Browse the repository at this point in the history
  • Loading branch information
github-actions[bot] authored Jul 1, 2024
2 parents e302e9a + 8e216e5 commit c337675
Show file tree
Hide file tree
Showing 14 changed files with 31 additions and 38 deletions.
8 changes: 4 additions & 4 deletions src/backend/base/langflow/base/models/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,13 +143,13 @@ def get_chat_result(
messages.append(HumanMessage(content=input_value))
inputs: Union[list, dict] = messages or {}
try:
runnable = runnable.with_config(
{"run_name": self.display_name, "project_name": self._tracing_service.project_name}
runnable = runnable.with_config( # type: ignore
{"run_name": self.display_name, "project_name": self._tracing_service.project_name} # type: ignore
)
if stream:
return runnable.stream(inputs)
return runnable.stream(inputs) # type: ignore
else:
message = runnable.invoke(inputs)
message = runnable.invoke(inputs) # type: ignore
result = message.content if hasattr(message, "content") else message
if isinstance(message, AIMessage):
status_message = self.build_status_message(message)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ class ChatLiteLLMModelComponent(LCModelComponent):
Output(display_name="Language Model", name="model_output", method="build_model"),
]

def build_model(self) -> LanguageModel:
def build_model(self) -> LanguageModel: # type: ignore[type-var]
try:
import litellm # type: ignore

Expand Down Expand Up @@ -176,5 +176,4 @@ def build_model(self) -> LanguageModel:
openrouter_api_key=api_keys["openrouter_api_key"],
)

return output
return output
return output # type: ignore
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ class AmazonBedrockComponent(LCModelComponent):
Output(display_name="Language Model", name="model_output", method="build_model"),
]

def build_model(self) -> LanguageModel:
def build_model(self) -> LanguageModel: # type: ignore[type-var]
model_id = self.model_id
credentials_profile_name = self.credentials_profile_name
region_name = self.region_name
Expand All @@ -89,4 +89,4 @@ def build_model(self) -> LanguageModel:
)
except Exception as e:
raise ValueError("Could not connect to AmazonBedrock API.") from e
return output
return output # type: ignore
4 changes: 2 additions & 2 deletions src/backend/base/langflow/components/models/AnthropicModel.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ class AnthropicModelComponent(LCModelComponent):
Output(display_name="Language Model", name="model_output", method="build_model"),
]

def build_model(self) -> LanguageModel:
def build_model(self) -> LanguageModel: # type: ignore[type-var]
model = self.model
anthropic_api_key = self.anthropic_api_key
max_tokens = self.max_tokens
Expand All @@ -83,7 +83,7 @@ def build_model(self) -> LanguageModel:
except Exception as e:
raise ValueError("Could not connect to Anthropic API.") from e

return output
return output # type: ignore

def _get_exception_message(self, exception: Exception) -> str | None:
"""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ class AzureChatOpenAIComponent(LCModelComponent):
Output(display_name="Language Model", name="model_output", method="model_response"),
]

def model_response(self) -> LanguageModel:
def model_response(self) -> LanguageModel: # type: ignore[type-var]
model = self.model
azure_endpoint = self.azure_endpoint
azure_deployment = self.azure_deployment
Expand Down Expand Up @@ -107,4 +107,4 @@ def model_response(self) -> LanguageModel:
except Exception as e:
raise ValueError("Could not connect to AzureOpenAI API.") from e

return output
return output # type: ignore
2 changes: 1 addition & 1 deletion src/backend/base/langflow/components/models/CohereModel.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,4 +51,4 @@ def build_model(self) -> LanguageModel | BaseChatModel:
cohere_api_key=api_key,
)

return output
return output # type: ignore
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,7 @@
from langflow.base.constants import STREAM_INFO_TEXT
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs import (
BoolInput,
DropdownInput,
FloatInput,
IntInput,
MessageInput,
SecretStrInput,
StrInput,
)
from langflow.inputs import BoolInput, DropdownInput, FloatInput, IntInput, MessageInput, SecretStrInput, StrInput


class GoogleGenerativeAIComponent(LCModelComponent):
Expand Down Expand Up @@ -66,7 +58,7 @@ class GoogleGenerativeAIComponent(LCModelComponent):
),
]

def build_model(self) -> LanguageModel:
def build_model(self) -> LanguageModel: # type: ignore[type-var]
try:
from langchain_google_genai import ChatGoogleGenerativeAI
except ImportError:
Expand All @@ -90,4 +82,4 @@ def build_model(self) -> LanguageModel:
google_api_key=SecretStr(google_api_key),
)

return output
return output # type: ignore
4 changes: 2 additions & 2 deletions src/backend/base/langflow/components/models/GroqModel.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ class GroqModel(LCModelComponent):
),
]

def build_model(self) -> LanguageModel:
def build_model(self) -> LanguageModel: # type: ignore[type-var]
groq_api_key = self.groq_api_key
model_name = self.model_name
max_tokens = self.max_tokens
Expand All @@ -87,4 +87,4 @@ def build_model(self) -> LanguageModel:
streaming=stream,
)

return output
return output # type: ignore
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ class HuggingFaceEndpointsComponent(LCModelComponent):
Output(display_name="Language Model", name="model_output", method="build_model"),
]

def build_model(self) -> LanguageModel:
def build_model(self) -> LanguageModel: # type: ignore[type-var]
endpoint_url = self.endpoint_url
task = self.task
huggingfacehub_api_token = self.huggingfacehub_api_token
Expand All @@ -53,4 +53,4 @@ def build_model(self) -> LanguageModel:
raise ValueError("Could not connect to HuggingFace Endpoints API.") from e

output = ChatHuggingFace(llm=llm)
return output
return output # type: ignore
4 changes: 2 additions & 2 deletions src/backend/base/langflow/components/models/MistralModel.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ class MistralAIModelComponent(LCModelComponent):
Output(display_name="Language Model", name="model_output", method="build_model"),
]

def build_model(self) -> LanguageModel:
def build_model(self) -> LanguageModel: # type: ignore[type-var]
mistral_api_key = self.mistral_api_key
temperature = self.temperature
model_name = self.model_name
Expand Down Expand Up @@ -102,4 +102,4 @@ def build_model(self) -> LanguageModel:
safe_mode=safe_mode,
)

return output
return output # type: ignore
4 changes: 2 additions & 2 deletions src/backend/base/langflow/components/models/OllamaModel.py
Original file line number Diff line number Diff line change
Expand Up @@ -223,7 +223,7 @@ def get_model(self, url: str) -> list[str]:
Output(display_name="Language Model", name="model_output", method="build_model"),
]

def build_model(self) -> LanguageModel:
def build_model(self) -> LanguageModel: # type: ignore[type-var]
# Mapping mirostat settings to their corresponding values
mirostat_options = {"Mirostat": 1, "Mirostat 2.0": 2}

Expand Down Expand Up @@ -272,4 +272,4 @@ def build_model(self) -> LanguageModel:
except Exception as e:
raise ValueError("Could not initialize Ollama LLM.") from e

return output
return output # type: ignore
6 changes: 3 additions & 3 deletions src/backend/base/langflow/components/models/OpenAIModel.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,8 +80,8 @@ class OpenAIModelComponent(LCModelComponent):
),
]

def build_model(self) -> LanguageModel:
# self.output_schema is a list of dictionaries
def build_model(self) -> LanguageModel: # type: ignore[type-var]
# self.output_schema is a list of dictionaries
# let's convert it to a dictionary
output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})
openai_api_key = self.openai_api_key
Expand Down Expand Up @@ -112,7 +112,7 @@ def build_model(self) -> LanguageModel:
else:
output = output.bind(response_format={"type": "json_object"}) # type: ignore

return output
return output # type: ignore

def _get_exception_message(self, e: Exception):
"""
Expand Down
4 changes: 2 additions & 2 deletions src/backend/base/langflow/components/models/VertexAiModel.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ class ChatVertexAIComponent(LCModelComponent):
Output(display_name="Language Model", name="model_output", method="build_model"),
]

def build_model(self) -> LanguageModel:
def build_model(self) -> LanguageModel: # type: ignore[type-var]
credentials = self.credentials
location = self.location
max_output_tokens = self.max_output_tokens
Expand All @@ -75,4 +75,4 @@ def build_model(self) -> LanguageModel:
verbose=verbose,
)

return output
return output # type: ignore
2 changes: 2 additions & 0 deletions src/backend/base/langflow/field_typing/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
TextSplitter,
Tool,
VectorStore,
LanguageModel,
)
from .range_spec import RangeSpec

Expand Down Expand Up @@ -84,4 +85,5 @@ def __getattr__(name: str) -> Any:
"BaseChatModel",
"Retriever",
"Text",
"LanguageModel",
]

0 comments on commit c337675

Please sign in to comment.