From f727bb5789f7f792cc2db7d833da3561e92ca028 Mon Sep 17 00:00:00 2001
From: Christophe Bornet
Date: Tue, 1 Oct 2024 17:38:32 +0200
Subject: [PATCH] feat: Add ruff rules for error messages (EM) (#3978)

Add ruff rules for error messages (EM)
---
 src/backend/base/langflow/__main__.py | 3 +-
 src/backend/base/langflow/api/utils.py | 9 +-
 src/backend/base/langflow/api/v1/chat.py | 12 +-
 src/backend/base/langflow/api/v1/endpoints.py | 25 ++--
 src/backend/base/langflow/api/v1/schemas.py | 6 +-
 .../base/langflow/base/agents/agent.py | 12 +-
 .../base/langflow/base/agents/crewai/crew.py | 3 +-
 .../base/langflow/base/chains/model.py | 6 +-
 src/backend/base/langflow/base/curl/parse.py | 3 +-
 src/backend/base/langflow/base/data/utils.py | 9 +-
 .../base/langflow/base/embeddings/model.py | 9 +-
 src/backend/base/langflow/base/io/chat.py | 6 +-
 .../base/langchain_utilities/model.py | 6 +-
 .../base/langflow/base/memory/model.py | 6 +-
 .../base/langflow/base/models/model.py | 9 +-
 .../base/langflow/base/prompts/api_utils.py | 6 +-
 .../base/langflow/base/textsplitters/model.py | 6 +-
 .../langflow/base/tools/component_tool.py | 3 +-
 .../base/langflow/base/tools/flow_tool.py | 16 +--
 .../base/langflow/base/vectorstores/model.py | 18 ++-
 .../components/agents/OpenAIToolsAgent.py | 3 +-
 .../components/agents/ToolCallingAgent.py | 3 +-
 .../langflow/components/agents/XMLAgent.py | 3 +-
 .../components/astra_assistants/getenvvar.py | 3 +-
 .../components/astra_assistants/run.py | 3 +-
 .../components/chains/SQLGenerator.py | 6 +-
 .../langflow/components/data/APIRequest.py | 9 +-
 .../base/langflow/components/data/File.py | 9 +-
 .../base/langflow/components/data/Gmail.py | 20 ++--
 .../langflow/components/data/GoogleDrive.py | 17 +--
 .../base/langflow/components/data/URL.py | 3 +-
 .../deactivated/ChatLiteLLMModel.py | 11 +-
 .../deactivated/ExtractKeyFromData.py | 3 +-
 .../components/deactivated/SubFlow.py | 3 +-
 .../documentloaders/AssemblyAILeMUR.py | 6 +-
 .../embeddings/AzureOpenAIEmbeddings.py | 3 +-
 .../GoogleGenerativeAIEmbeddings.py | 6 +-
 .../HuggingFaceInferenceAPIEmbeddings.py | 15 ++-
 .../embeddings/MistalAIEmbeddings.py | 3 +-
 .../components/embeddings/NVIDIAEmbeddings.py | 9 +-
 .../components/embeddings/OllamaEmbeddings.py | 3 +-
 .../embeddings/VertexAIEmbeddings.py | 5 +-
 .../components/helpers/ParseJSONData.py | 3 +-
 .../langchain_utilities/FirecrawlCrawlApi.py | 3 +-
 .../langchain_utilities/FirecrawlScrapeApi.py | 3 +-
 .../JSONDocumentBuilder.py | 3 +-
 .../langchain_utilities/SpiderTool.py | 6 +-
 .../components/memories/AstraDBChatMemory.py | 3 +-
 .../memories/CassandraChatMemory.py | 5 +-
 .../components/memories/ZepChatMemory.py | 5 +-
 .../components/models/AmazonBedrockModel.py | 6 +-
 .../components/models/AnthropicModel.py | 8 +-
 .../components/models/AzureOpenAIModel.py | 3 +-
 .../models/BaiduQianfanChatModel.py | 3 +-
 .../models/GoogleGenerativeAIModel.py | 3 +-
 .../components/models/HuggingFaceModel.py | 3 +-
 .../langflow/components/models/NvidiaModel.py | 6 +-
 .../langflow/components/models/OllamaModel.py | 6 +-
 .../components/models/VertexAiModel.py | 5 +-
 .../components/prompts/LangChainHubPrompt.py | 3 +-
 .../components/prototypes/CreateData.py | 6 +-
 .../components/prototypes/FlowTool.py | 6 +-
 .../components/prototypes/JSONCleaner.py | 14 ++-
 .../langflow/components/prototypes/RunFlow.py | 3 +-
 .../components/prototypes/RunnableExecutor.py | 3 +-
 .../components/prototypes/SQLExecutor.py | 3 +-
 .../components/prototypes/SelectData.py | 3 +-
 .../langflow/components/prototypes/SubFlow.py | 3 +-
 .../components/prototypes/UpdateData.py | 6 +-
 .../components/retrievers/AmazonKendra.py | 3 +-
 .../components/retrievers/CohereRerank.py | 3 +-
 .../components/retrievers/MetalRetriever.py | 3 +-
 .../components/retrievers/NvidiaRerank.py | 9 +-
 .../retrievers/SelfQueryRetriever.py | 3 +-
 .../retrievers/VectaraSelfQueryRetriver.py | 3 +-
 .../components/tools/GleanSearchAPI.py | 3 +-
 .../components/tools/GoogleSearchAPI.py | 3 +-
 .../tools/PythonCodeStructuredTool.py | 6 +-
 .../components/tools/PythonREPLTool.py | 6 +-
 .../langflow/components/tools/SearXNGTool.py | 3 +-
 .../components/vectorstores/AstraDB.py | 18 ++-
 .../components/vectorstores/Cassandra.py | 11 +-
 .../components/vectorstores/CassandraGraph.py | 10 +-
 .../components/vectorstores/Chroma.py | 6 +-
 .../components/vectorstores/Clickhouse.py | 8 +-
 .../components/vectorstores/Couchbase.py | 8 +-
 .../langflow/components/vectorstores/FAISS.py | 9 +-
 .../langflow/components/vectorstores/HCD.py | 23 ++--
 .../components/vectorstores/Milvus.py | 3 +-
 .../vectorstores/MongoDBAtlasVector.py | 6 +-
 .../components/vectorstores/Qdrant.py | 3 +-
 .../langflow/components/vectorstores/Redis.py | 3 +-
 .../components/vectorstores/Vectara.py | 3 +-
 .../components/vectorstores/Weaviate.py | 3 +-
 .../components/vectorstores/vectara_rag.py | 3 +-
 .../base/langflow/custom/attributes.py | 3 +-
 .../custom/code_parser/code_parser.py | 3 +-
 .../custom/custom_component/component.py | 67 +++++++----
 .../custom_component/custom_component.py | 48 +++++---
 .../directory_reader/directory_reader.py | 3 +-
 src/backend/base/langflow/custom/utils.py | 15 ++-
 .../base/langflow/events/event_manager.py | 15 ++-
 .../base/langflow/field_typing/range_spec.py | 9 +-
 src/backend/base/langflow/graph/edge/base.py | 31 +++--
 .../base/langflow/graph/edge/schema.py | 3 +-
 src/backend/base/langflow/graph/graph/base.py | 110 ++++++++++++------
 .../base/langflow/graph/graph/state_model.py | 3 +-
 .../base/langflow/graph/graph/utils.py | 12 +-
 .../base/langflow/graph/state/model.py | 15 ++-
 .../base/langflow/graph/vertex/base.py | 35 ++++--
 .../base/langflow/graph/vertex/types.py | 24 ++--
 src/backend/base/langflow/helpers/data.py | 3 +-
 src/backend/base/langflow/helpers/flow.py | 18 ++-
 .../base/langflow/initial_setup/setup.py | 9 +-
 .../base/langflow/inputs/input_mixin.py | 12 +-
 src/backend/base/langflow/inputs/inputs.py | 36 ++++--
 .../base/langflow/inputs/validators.py | 3 +-
 .../langflow/interface/initialize/loading.py | 12 +-
 src/backend/base/langflow/interface/utils.py | 6 +-
 src/backend/base/langflow/io/schema.py | 6 +-
 src/backend/base/langflow/load/load.py | 3 +-
 src/backend/base/langflow/load/utils.py | 21 ++--
 src/backend/base/langflow/main.py | 9 +-
 src/backend/base/langflow/memory.py | 6 +-
 .../base/langflow/processing/process.py | 6 +-
 src/backend/base/langflow/schema/data.py | 9 +-
 src/backend/base/langflow/schema/dotdict.py | 6 +-
 src/backend/base/langflow/schema/image.py | 3 +-
 src/backend/base/langflow/schema/message.py | 12 +-
 src/backend/base/langflow/schema/table.py | 3 +-
 .../base/langflow/services/auth/utils.py | 3 +-
 .../base/langflow/services/cache/factory.py | 3 +-
 .../base/langflow/services/cache/service.py | 11 +-
 .../base/langflow/services/cache/utils.py | 9 +-
 .../base/langflow/services/chat/cache.py | 6 +-
 .../langflow/services/database/factory.py | 3 +-
 .../services/database/models/api_key/crud.py | 6 +-
 .../services/database/models/flow/model.py | 30 +++--
 .../services/database/models/flow/utils.py | 3 +-
 .../services/database/models/message/crud.py | 3 +-
 .../services/database/models/message/model.py | 3 +-
 .../langflow/services/database/service.py | 21 ++--
 .../base/langflow/services/database/utils.py | 6 +-
 src/backend/base/langflow/services/factory.py | 6 +-
 src/backend/base/langflow/services/manager.py | 8 +-
 .../base/langflow/services/settings/base.py | 6 +-
 .../langflow/services/settings/manager.py | 6 +-
 .../langflow/services/settings/service.py | 3 +-
 .../base/langflow/services/storage/local.py | 6 +-
 .../base/langflow/services/store/service.py | 77 +++++++-----
 .../langflow/services/task/backends/celery.py | 3 +-
 .../base/langflow/services/task/service.py | 3 +-
 .../services/telemetry/opentelemetry.py | 30 +++--
 .../langflow/services/tracing/langfuse.py | 3 +-
 src/backend/base/langflow/services/utils.py | 15 ++-
 .../langflow/services/variable/kubernetes.py | 9 +-
 .../services/variable/kubernetes_secrets.py | 3 +-
 .../langflow/services/variable/service.py | 15 ++-
 .../base/langflow/template/field/base.py | 9 +-
 .../langflow/template/frontend_node/base.py | 8 +-
 .../base/langflow/template/template/base.py | 9 +-
 .../base/langflow/utils/concurrency.py | 3 +-
 src/backend/base/langflow/utils/payload.py | 3 +-
 src/backend/base/langflow/utils/schemas.py | 9 +-
 src/backend/base/langflow/utils/util.py | 9 +-
 src/backend/base/langflow/utils/validate.py | 24 ++--
 src/backend/base/langflow/utils/version.py | 3 +-
 src/backend/base/langflow/worker.py | 3 +-
 src/backend/base/pyproject.toml | 1 +
 169 files changed, 1011 insertions(+), 554 deletions(-)

diff --git a/src/backend/base/langflow/__main__.py b/src/backend/base/langflow/__main__.py
index b9bda3211da0..7d7d491331bf 100644
--- a/src/backend/base/langflow/__main__.py
+++ b/src/backend/base/langflow/__main__.py
@@ -403,7 +403,8 @@ def superuser(
         if result:
             typer.echo("Default folder created successfully.")
         else:
-            raise RuntimeError("Could not create default folder.")
+            msg = "Could not create default folder."
+ raise RuntimeError(msg) typer.echo("Superuser created successfully.") else: diff --git a/src/backend/base/langflow/api/utils.py b/src/backend/base/langflow/api/utils.py index 87e9a7fd6d35..9a8380985468 100644 --- a/src/backend/base/langflow/api/utils.py +++ b/src/backend/base/langflow/api/utils.py @@ -135,7 +135,8 @@ async def build_graph_from_data(flow_id: str, payload: dict, **kwargs): for vertex_id in graph._has_session_id_vertices: vertex = graph.get_vertex(vertex_id) if vertex is None: - raise ValueError(f"Vertex {vertex_id} not found") + msg = f"Vertex {vertex_id} not found" + raise ValueError(msg) if not vertex._raw_params.get("session_id"): vertex.update_raw_params({"session_id": flow_id}, overwrite=True) @@ -150,7 +151,8 @@ async def build_graph_from_db_no_cache(flow_id: str, session: Session): """Build and cache the graph.""" flow: Flow | None = session.get(Flow, flow_id) if not flow or not flow.data: - raise ValueError("Invalid flow ID") + msg = "Invalid flow ID" + raise ValueError(msg) return await build_graph_from_data(flow_id, flow.data, flow_name=flow.name, user_id=str(flow.user_id)) @@ -260,4 +262,5 @@ async def cascade_delete_flow(session: Session, flow: Flow): session.exec(delete(VertexBuildTable).where(VertexBuildTable.flow_id == flow.id)) # type: ignore session.exec(delete(Flow).where(Flow.id == flow.id)) # type: ignore except Exception as e: - raise RuntimeError(f"Unable to cascade delete flow: ${flow.id}", e) + msg = f"Unable to cascade delete flow: ${flow.id}" + raise RuntimeError(msg, e) diff --git a/src/backend/base/langflow/api/v1/chat.py b/src/backend/base/langflow/api/v1/chat.py index 7edc9e9d4ed2..8b4003e0c3d0 100644 --- a/src/backend/base/langflow/api/v1/chat.py +++ b/src/backend/base/langflow/api/v1/chat.py @@ -340,7 +340,8 @@ async def build_vertices( vertex_build_response_json = vertex_build_response.model_dump_json() build_data = json.loads(vertex_build_response_json) except Exception as exc: - raise ValueError(f"Error serializing vertex build response: {exc}") from exc + msg = f"Error serializing vertex build response: {exc}" + raise ValueError(msg) from exc event_manager.on_end_vertex(data={"build_data": build_data}) await client_consumed_queue.get() if vertex_build_response.valid: @@ -652,13 +653,15 @@ async def stream_vertex(): cache = await chat_service.get_cache(flow_id_str) if not cache: # If there's no cache - raise ValueError(f"No cache found for {flow_id_str}.") + msg = f"No cache found for {flow_id_str}." 
+ raise ValueError(msg) else: graph = cache.get("result") vertex: InterfaceVertex = graph.get_vertex(vertex_id) if not hasattr(vertex, "stream"): - raise ValueError(f"Vertex {vertex_id} does not support streaming") + msg = f"Vertex {vertex_id} does not support streaming" + raise ValueError(msg) if isinstance(vertex._built_result, str) and vertex._built_result: stream_data = StreamData( event="message", @@ -691,7 +694,8 @@ async def stream_vertex(): ) yield str(stream_data) else: - raise ValueError(f"No result found for vertex {vertex_id}") + msg = f"No result found for vertex {vertex_id}" + raise ValueError(msg) except Exception as exc: logger.exception(f"Error building Component: {exc}") diff --git a/src/backend/base/langflow/api/v1/endpoints.py b/src/backend/base/langflow/api/v1/endpoints.py index 025464fbfe64..ad323ada522e 100644 --- a/src/backend/base/langflow/api/v1/endpoints.py +++ b/src/backend/base/langflow/api/v1/endpoints.py @@ -94,17 +94,15 @@ def validate_input_and_tweaks(input_request: SimplifiedAPIRequest): has_input_value = value.get("input_value") is not None input_value_is_chat = input_request.input_value is not None and input_request.input_type == "chat" if has_input_value and input_value_is_chat: - raise InvalidChatInputException( - "If you pass an input_value to the chat input, you cannot pass a tweak with the same name." - ) + msg = "If you pass an input_value to the chat input, you cannot pass a tweak with the same name." + raise InvalidChatInputException(msg) elif "Text Input" in key or "TextInput" in key: if isinstance(value, dict): has_input_value = value.get("input_value") is not None input_value_is_text = input_request.input_value is not None and input_request.input_type == "text" if has_input_value and input_value_is_text: - raise InvalidChatInputException( - "If you pass an input_value to the text input, you cannot pass a tweak with the same name." - ) + msg = "If you pass an input_value to the text input, you cannot pass a tweak with the same name." + raise InvalidChatInputException(msg) async def simple_run_flow( @@ -120,7 +118,8 @@ async def simple_run_flow( user_id = api_key_user.id if api_key_user else None flow_id_str = str(flow.id) if flow.data is None: - raise ValueError(f"Flow {flow_id_str} has no data") + msg = f"Flow {flow_id_str} has no data" + raise ValueError(msg) graph_data = flow.data.copy() graph_data = process_tweaks(graph_data, input_request.tweaks or {}, stream=stream) graph = Graph.from_payload(graph_data, flow_id=flow_id_str, user_id=str(user_id), flow_name=flow.name) @@ -331,8 +330,9 @@ async def webhook_run_flow( data = await request.body() if not data: logger.error("Request body is empty") + msg = "Request body is empty. You should provide a JSON payload containing the flow ID." raise ValueError( - "Request body is empty. 
You should provide a JSON payload containing the flow ID.", + msg, ) # get all webhook components in the flow @@ -448,7 +448,8 @@ async def experimental_run_flow( session_data = await session_service.load_session(session_id, flow_id=flow_id_str) graph, artifacts = session_data if session_data else (None, None) if graph is None: - raise ValueError(f"Session {session_id} not found") + msg = f"Session {session_id} not found" + raise ValueError(msg) else: # Get the flow that matches the flow_id and belongs to the user # flow = session.query(Flow).filter(Flow.id == flow_id).filter(Flow.user_id == api_key_user.id).first() @@ -456,10 +457,12 @@ async def experimental_run_flow( select(Flow).where(Flow.id == flow_id_str).where(Flow.user_id == api_key_user.id) ).first() if flow is None: - raise ValueError(f"Flow {flow_id_str} not found") + msg = f"Flow {flow_id_str} not found" + raise ValueError(msg) if flow.data is None: - raise ValueError(f"Flow {flow_id_str} has no data") + msg = f"Flow {flow_id_str} has no data" + raise ValueError(msg) graph_data = flow.data graph_data = process_tweaks(graph_data, tweaks or {}) graph = Graph.from_payload(graph_data, flow_id=flow_id_str) diff --git a/src/backend/base/langflow/api/v1/schemas.py b/src/backend/base/langflow/api/v1/schemas.py index 5ce65e2fcd51..5149205e1fb7 100644 --- a/src/backend/base/langflow/api/v1/schemas.py +++ b/src/backend/base/langflow/api/v1/schemas.py @@ -110,7 +110,8 @@ class ChatResponse(ChatMessage): @classmethod def validate_message_type(cls, v): if v not in ["start", "stream", "end", "error", "info", "file"]: - raise ValueError("type must be start, stream, end, error, info, or file") + msg = "type must be start, stream, end, error, info, or file" + raise ValueError(msg) return v @@ -134,7 +135,8 @@ class FileResponse(ChatMessage): @classmethod def validate_data_type(cls, v): if v not in ["image", "csv"]: - raise ValueError("data_type must be image or csv") + msg = "data_type must be image or csv" + raise ValueError(msg) return v diff --git a/src/backend/base/langflow/base/agents/agent.py b/src/backend/base/langflow/base/agents/agent.py index 554e7241389d..1f1861be3249 100644 --- a/src/backend/base/langflow/base/agents/agent.py +++ b/src/backend/base/langflow/base/agents/agent.py @@ -67,9 +67,11 @@ def _validate_outputs(self): output_names = [output.name for output in self.outputs] for method_name in required_output_methods: if method_name not in output_names: - raise ValueError(f"Output with name '{method_name}' must be defined.") + msg = f"Output with name '{method_name}' must be defined." + raise ValueError(msg) elif not hasattr(self, method_name): - raise ValueError(f"Method '{method_name}' must be defined.") + msg = f"Method '{method_name}' must be defined." + raise ValueError(msg) def get_agent_kwargs(self, flatten: bool = False) -> dict: base = { @@ -102,7 +104,8 @@ async def run_agent(self, agent: AgentExecutor) -> Text: ) self.status = result if "output" not in result: - raise ValueError("Output key not found in result. Tried 'output'.") + msg = "Output key not found in result. Tried 'output'." + raise ValueError(msg) return cast(str, result.get("output")) @@ -148,7 +151,8 @@ async def run_agent( ) self.status = result if "output" not in result: - raise ValueError("Output key not found in result. Tried 'output'.") + msg = "Output key not found in result. Tried 'output'." 
+ raise ValueError(msg) return cast(str, result.get("output")) diff --git a/src/backend/base/langflow/base/agents/crewai/crew.py b/src/backend/base/langflow/base/agents/crewai/crew.py index 15c0a2d363de..360fe3cc3f48 100644 --- a/src/backend/base/langflow/base/agents/crewai/crew.py +++ b/src/backend/base/langflow/base/agents/crewai/crew.py @@ -46,7 +46,8 @@ def get_tasks_and_agents(self) -> tuple[list[Task], list[Agent]]: return self.tasks, self.agents def build_crew(self) -> Crew: - raise NotImplementedError("build_crew must be implemented in subclasses") + msg = "build_crew must be implemented in subclasses" + raise NotImplementedError(msg) def get_task_callback( self, diff --git a/src/backend/base/langflow/base/chains/model.py b/src/backend/base/langflow/base/chains/model.py index efbe4ea19225..a8143bd98dca 100644 --- a/src/backend/base/langflow/base/chains/model.py +++ b/src/backend/base/langflow/base/chains/model.py @@ -12,6 +12,8 @@ def _validate_outputs(self): output_names = [output.name for output in self.outputs] for method_name in required_output_methods: if method_name not in output_names: - raise ValueError(f"Output with name '{method_name}' must be defined.") + msg = f"Output with name '{method_name}' must be defined." + raise ValueError(msg) elif not hasattr(self, method_name): - raise ValueError(f"Method '{method_name}' must be defined.") + msg = f"Method '{method_name}' must be defined." + raise ValueError(msg) diff --git a/src/backend/base/langflow/base/curl/parse.py b/src/backend/base/langflow/base/curl/parse.py index bbc85a2b3b3b..055f92499527 100644 --- a/src/backend/base/langflow/base/curl/parse.py +++ b/src/backend/base/langflow/base/curl/parse.py @@ -49,7 +49,8 @@ def parse_curl_command(curl_command): tokens = shlex.split(normalize_newlines(curl_command)) tokens = [token for token in tokens if token and token != " "] if tokens and "curl" not in tokens[0]: - raise ValueError("Invalid curl command") + msg = "Invalid curl command" + raise ValueError(msg) args_template = { "command": None, "url": None, diff --git a/src/backend/base/langflow/base/data/utils.py b/src/backend/base/langflow/base/data/utils.py index 7553d047cb52..ea7fb896e72d 100644 --- a/src/backend/base/langflow/base/data/utils.py +++ b/src/backend/base/langflow/base/data/utils.py @@ -53,7 +53,8 @@ def retrieve_file_paths( ) -> list[str]: path_obj = Path(path) if not path_obj.exists() or not path_obj.is_dir(): - raise ValueError(f"Path {path} must exist and be a directory.") + msg = f"Path {path} must exist and be a directory." 
+ raise ValueError(msg) def match_types(p: Path) -> bool: return any(p.suffix == f".{t}" for t in types) if types else True @@ -83,7 +84,8 @@ def partition_file_to_data(file_path: str, silent_errors: bool) -> Data | None: elements = partition(file_path) except Exception as e: if not silent_errors: - raise ValueError(f"Error loading file {file_path}: {e}") from e + msg = f"Error loading file {file_path}: {e}" + raise ValueError(msg) from e return None # Create a Data @@ -147,7 +149,8 @@ def parse_text_file_to_data(file_path: str, silent_errors: bool) -> Data | None: text = ET.tostring(xml_element, encoding="unicode") except Exception as e: if not silent_errors: - raise ValueError(f"Error loading file {file_path}: {e}") from e + msg = f"Error loading file {file_path}: {e}" + raise ValueError(msg) from e return None record = Data(data={"file_path": file_path, "text": text}) diff --git a/src/backend/base/langflow/base/embeddings/model.py b/src/backend/base/langflow/base/embeddings/model.py index f9059c608e07..ac9703378247 100644 --- a/src/backend/base/langflow/base/embeddings/model.py +++ b/src/backend/base/langflow/base/embeddings/model.py @@ -15,9 +15,12 @@ def _validate_outputs(self): output_names = [output.name for output in self.outputs] for method_name in required_output_methods: if method_name not in output_names: - raise ValueError(f"Output with name '{method_name}' must be defined.") + msg = f"Output with name '{method_name}' must be defined." + raise ValueError(msg) elif not hasattr(self, method_name): - raise ValueError(f"Method '{method_name}' must be defined.") + msg = f"Method '{method_name}' must be defined." + raise ValueError(msg) def build_embeddings(self) -> Embeddings: - raise NotImplementedError("You must implement the build_embeddings method in your class.") + msg = "You must implement the build_embeddings method in your class." + raise NotImplementedError(msg) diff --git a/src/backend/base/langflow/base/io/chat.py b/src/backend/base/langflow/base/io/chat.py index 35e0fd1bb0b4..6f00d627a419 100644 --- a/src/backend/base/langflow/base/io/chat.py +++ b/src/backend/base/langflow/base/io/chat.py @@ -22,7 +22,8 @@ def store_message( flow_id=self.graph.flow_id, ) if len(messages) > 1: - raise ValueError("Only one message can be stored at a time.") + msg = "Only one message can be stored at a time." + raise ValueError(msg) stored_message = messages[0] if hasattr(self, "_event_manager") and self._event_manager and stored_message.id: if not isinstance(message.text, str): @@ -55,7 +56,8 @@ async def _handle_async_iterator(self, iterator: AsyncIterator, message: Message def _stream_message(self, message: Message, message_id: str) -> str: iterator = message.text if not isinstance(iterator, AsyncIterator | Iterator): - raise ValueError("The message must be an iterator or an async iterator.") + msg = "The message must be an iterator or an async iterator." 
+ raise ValueError(msg) if isinstance(iterator, AsyncIterator): return run_until_complete(self._handle_async_iterator(iterator, message, message_id)) diff --git a/src/backend/base/langflow/base/langchain_utilities/model.py b/src/backend/base/langflow/base/langchain_utilities/model.py index 7e41a6a0e724..3ee750a2624b 100644 --- a/src/backend/base/langflow/base/langchain_utilities/model.py +++ b/src/backend/base/langflow/base/langchain_utilities/model.py @@ -19,9 +19,11 @@ def _validate_outputs(self): output_names = [output.name for output in self.outputs] for method_name in required_output_methods: if method_name not in output_names: - raise ValueError(f"Output with name '{method_name}' must be defined.") + msg = f"Output with name '{method_name}' must be defined." + raise ValueError(msg) elif not hasattr(self, method_name): - raise ValueError(f"Method '{method_name}' must be defined.") + msg = f"Method '{method_name}' must be defined." + raise ValueError(msg) @abstractmethod def run_model(self) -> Data | list[Data]: diff --git a/src/backend/base/langflow/base/memory/model.py b/src/backend/base/langflow/base/memory/model.py index 8621e8193d3e..8c9eef0e132e 100644 --- a/src/backend/base/langflow/base/memory/model.py +++ b/src/backend/base/langflow/base/memory/model.py @@ -22,9 +22,11 @@ def _validate_outputs(self): output_names = [output.name for output in self.outputs] for method_name in required_output_methods: if method_name not in output_names: - raise ValueError(f"Output with name '{method_name}' must be defined.") + msg = f"Output with name '{method_name}' must be defined." + raise ValueError(msg) elif not hasattr(self, method_name): - raise ValueError(f"Method '{method_name}' must be defined.") + msg = f"Method '{method_name}' must be defined." + raise ValueError(msg) def build_base_memory(self) -> BaseChatMemory: return ConversationBufferMemory(chat_memory=self.build_message_history()) diff --git a/src/backend/base/langflow/base/models/model.py b/src/backend/base/langflow/base/models/model.py index 6f202a8b462b..674673d173e8 100644 --- a/src/backend/base/langflow/base/models/model.py +++ b/src/backend/base/langflow/base/models/model.py @@ -48,9 +48,11 @@ def _validate_outputs(self): output_names = [output.name for output in self.outputs] for method_name in required_output_methods: if method_name not in output_names: - raise ValueError(f"Output with name '{method_name}' must be defined.") + msg = f"Output with name '{method_name}' must be defined." + raise ValueError(msg) elif not hasattr(self, method_name): - raise ValueError(f"Method '{method_name}' must be defined.") + msg = f"Method '{method_name}' must be defined." + raise ValueError(msg) def text_response(self) -> Message: input_value = self.input_value @@ -145,7 +147,8 @@ def get_chat_result( ): messages: list[BaseMessage] = [] if not input_value and not system_message: - raise ValueError("The message you want to send to the model is empty.") + msg = "The message you want to send to the model is empty." 
+ raise ValueError(msg) system_message_added = False if input_value: if isinstance(input_value, Message): diff --git a/src/backend/base/langflow/base/prompts/api_utils.py b/src/backend/base/langflow/base/prompts/api_utils.py index 3876fe9ca325..7c55eb4f1291 100644 --- a/src/backend/base/langflow/base/prompts/api_utils.py +++ b/src/backend/base/langflow/base/prompts/api_utils.py @@ -127,14 +127,16 @@ def validate_prompt(prompt_template: str, silent_errors: bool = False) -> list[s # Check if there are invalid characters in the input_variables input_variables = _check_input_variables(input_variables) if any(var in _INVALID_NAMES for var in input_variables): - raise ValueError(f"Invalid input variables. None of the variables can be named {', '.join(input_variables)}. ") + msg = f"Invalid input variables. None of the variables can be named {', '.join(input_variables)}. " + raise ValueError(msg) try: PromptTemplate(template=prompt_template, input_variables=input_variables) except Exception as exc: logger.error(f"Invalid prompt: {exc}") if not silent_errors: - raise ValueError(f"Invalid prompt: {exc}") from exc + msg = f"Invalid prompt: {exc}" + raise ValueError(msg) from exc return input_variables diff --git a/src/backend/base/langflow/base/textsplitters/model.py b/src/backend/base/langflow/base/textsplitters/model.py index 058e21f426ce..481476d719c5 100644 --- a/src/backend/base/langflow/base/textsplitters/model.py +++ b/src/backend/base/langflow/base/textsplitters/model.py @@ -14,9 +14,11 @@ def _validate_outputs(self): output_names = [output.name for output in self.outputs] for method_name in required_output_methods: if method_name not in output_names: - raise ValueError(f"Output with name '{method_name}' must be defined.") + msg = f"Output with name '{method_name}' must be defined." + raise ValueError(msg) elif not hasattr(self, method_name): - raise ValueError(f"Method '{method_name}' must be defined.") + msg = f"Method '{method_name}' must be defined." + raise ValueError(msg) def build_document_transformer(self) -> BaseDocumentTransformer: return self.build_text_splitter() diff --git a/src/backend/base/langflow/base/tools/component_tool.py b/src/backend/base/langflow/base/tools/component_tool.py index 3f26cde5ef52..368583581dbf 100644 --- a/src/backend/base/langflow/base/tools/component_tool.py +++ b/src/backend/base/langflow/base/tools/component_tool.py @@ -35,7 +35,8 @@ def _run( results, _ = self.component(**kwargs) return results except Exception as e: - raise ToolException(f"Error running {self.name}: {e}") + msg = f"Error running {self.name}: {e}" + raise ToolException(msg) ComponentTool.update_forward_refs() diff --git a/src/backend/base/langflow/base/tools/flow_tool.py b/src/backend/base/langflow/base/tools/flow_tool.py index 52c1b2faee0e..02707d89af06 100644 --- a/src/backend/base/langflow/base/tools/flow_tool.py +++ b/src/backend/base/langflow/base/tools/flow_tool.py @@ -33,7 +33,8 @@ def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseMod elif self.graph is not None: return build_schema_from_inputs(self.name, get_flow_inputs(self.graph)) else: - raise ToolException("No input schema available.") + msg = "No input schema available." + raise ToolException(msg) def _run( self, @@ -45,9 +46,8 @@ def _run( if len(args_names) == len(args): kwargs = {arg["arg_name"]: arg_value for arg, arg_value in zip(args_names, args)} elif len(args_names) != len(args) and len(args) != 0: - raise ToolException( - "Number of arguments does not match the number of inputs. 
Pass keyword arguments instead." - ) + msg = "Number of arguments does not match the number of inputs. Pass keyword arguments instead." + raise ToolException(msg) tweaks = {arg["component_name"]: kwargs[arg["arg_name"]] for arg in args_names} run_outputs = run_until_complete( @@ -72,16 +72,16 @@ def validate_inputs(self, args_names: list[dict[str, str]], args: Any, kwargs: A """Validate the inputs.""" if len(args) > 0 and len(args) != len(args_names): - raise ToolException( - "Number of positional arguments does not match the number of inputs. Pass keyword arguments instead." - ) + msg = "Number of positional arguments does not match the number of inputs. Pass keyword arguments instead." + raise ToolException(msg) if len(args) == len(args_names): kwargs = {arg_name["arg_name"]: arg_value for arg_name, arg_value in zip(args_names, args)} missing_args = [arg["arg_name"] for arg in args_names if arg["arg_name"] not in kwargs] if missing_args: - raise ToolException(f"Missing required arguments: {', '.join(missing_args)}") + msg = f"Missing required arguments: {', '.join(missing_args)}" + raise ToolException(msg) return kwargs diff --git a/src/backend/base/langflow/base/vectorstores/model.py b/src/backend/base/langflow/base/vectorstores/model.py index f38c23d6e7a5..047cab5f312a 100644 --- a/src/backend/base/langflow/base/vectorstores/model.py +++ b/src/backend/base/langflow/base/vectorstores/model.py @@ -47,10 +47,11 @@ def __init_subclass__(cls, **kwargs): if hasattr(cls, "build_vector_store"): method = cls.build_vector_store if not hasattr(method, "_is_cached_vector_store_checked"): - raise TypeError( + msg = ( f"The method 'build_vector_store' in class {cls.__name__} " "must be decorated with @check_cached_vector_store" ) + raise TypeError(msg) trace_type = "retriever" outputs = [ @@ -81,9 +82,11 @@ def _validate_outputs(self): output_names = [output.name for output in self.outputs] for method_name in required_output_methods: if method_name not in output_names: - raise ValueError(f"Output with name '{method_name}' must be defined.") + msg = f"Output with name '{method_name}' must be defined." + raise ValueError(msg) elif not hasattr(self, method_name): - raise ValueError(f"Method '{method_name}' must be defined.") + msg = f"Method '{method_name}' must be defined." + raise ValueError(msg) def search_with_vector_store( self, @@ -112,7 +115,8 @@ def search_with_vector_store( if input_value and isinstance(input_value, str) and hasattr(vector_store, "search"): docs = vector_store.search(query=input_value, search_type=search_type.lower(), k=k, **kwargs) else: - raise ValueError("Invalid inputs provided.") + msg = "Invalid inputs provided." + raise ValueError(msg) data = docs_to_data(docs) self.status = data return data @@ -136,7 +140,8 @@ def build_base_retriever(self) -> Retriever: # type: ignore[type-var] self.status = "Retriever built successfully." return retriever else: - raise ValueError(f"Vector Store {vector_store.__class__.__name__} does not have an as_retriever method.") + msg = f"Vector Store {vector_store.__class__.__name__} does not have an as_retriever method." + raise ValueError(msg) def search_documents(self) -> list[Data]: """ @@ -175,4 +180,5 @@ def build_vector_store(self) -> VectorStore: """ Builds the Vector Store object. """ - raise NotImplementedError("build_vector_store method must be implemented.") + msg = "build_vector_store method must be implemented." 
+ raise NotImplementedError(msg) diff --git a/src/backend/base/langflow/components/agents/OpenAIToolsAgent.py b/src/backend/base/langflow/components/agents/OpenAIToolsAgent.py index 80e3d036bcf5..20a6c2e243ac 100644 --- a/src/backend/base/langflow/components/agents/OpenAIToolsAgent.py +++ b/src/backend/base/langflow/components/agents/OpenAIToolsAgent.py @@ -38,7 +38,8 @@ def get_chat_history_data(self) -> list[Data] | None: def create_agent_runnable(self): if "input" not in self.user_prompt: - raise ValueError("Prompt must contain 'input' key.") + msg = "Prompt must contain 'input' key." + raise ValueError(msg) messages = [ ("system", self.system_prompt), ("placeholder", "{chat_history}"), diff --git a/src/backend/base/langflow/components/agents/ToolCallingAgent.py b/src/backend/base/langflow/components/agents/ToolCallingAgent.py index af63f2f82a10..7abb58b9b7f0 100644 --- a/src/backend/base/langflow/components/agents/ToolCallingAgent.py +++ b/src/backend/base/langflow/components/agents/ToolCallingAgent.py @@ -33,7 +33,8 @@ def get_chat_history_data(self) -> list[Data] | None: def create_agent_runnable(self): if "input" not in self.user_prompt: - raise ValueError("Prompt must contain 'input' key.") + msg = "Prompt must contain 'input' key." + raise ValueError(msg) messages = [ ("system", self.system_prompt), ("placeholder", "{chat_history}"), diff --git a/src/backend/base/langflow/components/agents/XMLAgent.py b/src/backend/base/langflow/components/agents/XMLAgent.py index 88e1fa4621a8..54447b22c35a 100644 --- a/src/backend/base/langflow/components/agents/XMLAgent.py +++ b/src/backend/base/langflow/components/agents/XMLAgent.py @@ -55,7 +55,8 @@ def get_chat_history_data(self) -> list[Data] | None: def create_agent_runnable(self): if "input" not in self.user_prompt: - raise ValueError("Prompt must contain 'input' key.") + msg = "Prompt must contain 'input' key." 
+ raise ValueError(msg) messages = [ ("system", self.system_prompt), ("placeholder", "{chat_history}"), diff --git a/src/backend/base/langflow/components/astra_assistants/getenvvar.py b/src/backend/base/langflow/components/astra_assistants/getenvvar.py index e2616b912105..1a9d001ee9da 100644 --- a/src/backend/base/langflow/components/astra_assistants/getenvvar.py +++ b/src/backend/base/langflow/components/astra_assistants/getenvvar.py @@ -25,7 +25,8 @@ class GetEnvVar(Component): def process_inputs(self) -> Message: if self.env_var_name not in os.environ: - raise Exception(f"Environment variable {self.env_var_name} not set") + msg = f"Environment variable {self.env_var_name} not set" + raise Exception(msg) else: message = Message(text=os.environ[self.env_var_name]) return message diff --git a/src/backend/base/langflow/components/astra_assistants/run.py b/src/backend/base/langflow/components/astra_assistants/run.py index 506547f897dd..ce11a969910c 100644 --- a/src/backend/base/langflow/components/astra_assistants/run.py +++ b/src/backend/base/langflow/components/astra_assistants/run.py @@ -92,4 +92,5 @@ def on_exception(self, exception: Exception) -> None: return message except Exception as e: print(e) - raise Exception(f"Error running assistant: {e}") + msg = f"Error running assistant: {e}" + raise Exception(msg) diff --git a/src/backend/base/langflow/components/chains/SQLGenerator.py b/src/backend/base/langflow/components/chains/SQLGenerator.py index 4c08bd0cf1f5..ef59696f7462 100644 --- a/src/backend/base/langflow/components/chains/SQLGenerator.py +++ b/src/backend/base/langflow/components/chains/SQLGenerator.py @@ -34,14 +34,16 @@ def invoke_chain(self) -> Message: prompt_template = None if self.top_k < 1: - raise ValueError("Top K must be greater than 0.") + msg = "Top K must be greater than 0." + raise ValueError(msg) if not prompt_template: sql_query_chain = create_sql_query_chain(llm=self.llm, db=self.db, k=self.top_k) else: # Check if {question} is in the prompt if "{question}" not in prompt_template.template or "question" not in prompt_template.input_variables: - raise ValueError("Prompt must contain `{question}` to be used with Natural Language to SQL.") + msg = "Prompt must contain `{question}` to be used with Natural Language to SQL." 
+ raise ValueError(msg) sql_query_chain = create_sql_query_chain(llm=self.llm, db=self.db, prompt=prompt_template, k=self.top_k) query_writer: Runnable = sql_query_chain | {"query": lambda x: x.replace("SQLQuery:", "").strip()} response = query_writer.invoke( diff --git a/src/backend/base/langflow/components/data/APIRequest.py b/src/backend/base/langflow/components/data/APIRequest.py index 287e879c25be..c498d9baa521 100644 --- a/src/backend/base/langflow/components/data/APIRequest.py +++ b/src/backend/base/langflow/components/data/APIRequest.py @@ -93,7 +93,8 @@ def parse_curl(self, curl: str, build_config: dotdict) -> dotdict: build_config["body"]["value"] = {} except Exception as exc: logger.error(f"Error parsing curl: {exc}") - raise ValueError(f"Error parsing curl: {exc}") + msg = f"Error parsing curl: {exc}" + raise ValueError(msg) return build_config def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): @@ -112,7 +113,8 @@ async def make_request( ) -> Data: method = method.upper() if method not in ["GET", "POST", "PATCH", "PUT", "DELETE"]: - raise ValueError(f"Unsupported method: {method}") + msg = f"Unsupported method: {method}" + raise ValueError(msg) if isinstance(body, str) and body: try: @@ -120,7 +122,8 @@ async def make_request( except Exception as e: logger.error(f"Error decoding JSON data: {e}") body = None - raise ValueError(f"Error decoding JSON data: {e}") + msg = f"Error decoding JSON data: {e}" + raise ValueError(msg) data = body if body else None diff --git a/src/backend/base/langflow/components/data/File.py b/src/backend/base/langflow/components/data/File.py index f36c1846e566..e11a496691de 100644 --- a/src/backend/base/langflow/components/data/File.py +++ b/src/backend/base/langflow/components/data/File.py @@ -33,16 +33,19 @@ class FileComponent(Component): def load_file(self) -> Data: if not self.path: - raise ValueError("Please, upload a file to use this component.") + msg = "Please, upload a file to use this component." + raise ValueError(msg) resolved_path = self.resolve_path(self.path) silent_errors = self.silent_errors extension = Path(resolved_path).suffix[1:].lower() if extension == "doc": - raise ValueError("doc files are not supported. Please save as .docx") + msg = "doc files are not supported. Please save as .docx" + raise ValueError(msg) if extension not in TEXT_FILE_TYPES: - raise ValueError(f"Unsupported file type: {extension}") + msg = f"Unsupported file type: {extension}" + raise ValueError(msg) data = parse_text_file_to_data(resolved_path, silent_errors) self.status = data if data else "No data" diff --git a/src/backend/base/langflow/components/data/Gmail.py b/src/backend/base/langflow/components/data/Gmail.py index 6c450638bd9d..803b385627e0 100644 --- a/src/backend/base/langflow/components/data/Gmail.py +++ b/src/backend/base/langflow/components/data/Gmail.py @@ -95,7 +95,8 @@ def _extract_email_content(self, msg: Any) -> HumanMessage: if name == "From": from_email = values["value"] if from_email is None: - raise ValueError("From email not found.") + msg = "From email not found." + raise ValueError(msg) if "parts" in msg["payload"]: parts = msg["payload"]["parts"] @@ -113,7 +114,8 @@ def _extract_email_content(self, msg: Any) -> HumanMessage: additional_kwargs={"sender": from_email}, ) return message - raise ValueError("No plain text part found in the email.") + msg = "No plain text part found in the email." 
+ raise ValueError(msg) def _get_message_data(self, service: Any, message: Any) -> ChatSession: msg = service.users().messages().get(userId="me", id=message["id"]).execute() @@ -141,7 +143,8 @@ def _get_message_data(self, service: Any, message: Any) -> ChatSession: if message_id == in_reply_to: response_email = message if response_email is None: - raise ValueError("Response email not found in the thread.") + msg = "Response email not found in the thread." + raise ValueError(msg) starter_content = self._extract_email_content(response_email) return ChatSession(messages=[starter_content, message_content]) else: @@ -172,7 +175,8 @@ def lazy_load(self) -> Iterator[ChatSession]: try: token_info = json.loads(json_string) except JSONDecodeError as e: - raise ValueError("Invalid JSON string") from e + msg = "Invalid JSON string" + raise ValueError(msg) from e creds = Credentials.from_authorized_user_info(token_info) @@ -182,11 +186,11 @@ def lazy_load(self) -> Iterator[ChatSession]: try: docs = loader.load() except RefreshError as e: - raise ValueError( - "Authentication error: Unable to refresh authentication token. Please try to reauthenticate." - ) from e + msg = "Authentication error: Unable to refresh authentication token. Please try to reauthenticate." + raise ValueError(msg) from e except Exception as e: - raise ValueError(f"Error loading documents: {e}") from e + msg = f"Error loading documents: {e}" + raise ValueError(msg) from e # Return the loaded documents self.status = docs diff --git a/src/backend/base/langflow/components/data/GoogleDrive.py b/src/backend/base/langflow/components/data/GoogleDrive.py index 131f0dc290af..39d05eec865a 100644 --- a/src/backend/base/langflow/components/data/GoogleDrive.py +++ b/src/backend/base/langflow/components/data/GoogleDrive.py @@ -44,7 +44,8 @@ def _load_credentials(self): if self.creds: return self.creds else: - raise ValueError("No credentials provided.") + msg = "No credentials provided." + raise ValueError(msg) class Config: arbitrary_types_allowed = True @@ -53,7 +54,8 @@ class Config: document_ids = [self.document_id] if len(document_ids) != 1: - raise ValueError("Expected a single document ID") + msg = "Expected a single document ID" + raise ValueError(msg) # TODO: Add validation to check if the document ID is valid @@ -61,7 +63,8 @@ class Config: try: token_info = json.loads(json_string) except JSONDecodeError as e: - raise ValueError("Invalid JSON string") from e + msg = "Invalid JSON string" + raise ValueError(msg) from e # Initialize the custom loader with the provided credentials and document IDs loader = CustomGoogleDriveLoader( @@ -73,11 +76,11 @@ class Config: docs = loader.load() # catch google.auth.exceptions.RefreshError except RefreshError as e: - raise ValueError( - "Authentication error: Unable to refresh authentication token. Please try to reauthenticate." - ) from e + msg = "Authentication error: Unable to refresh authentication token. Please try to reauthenticate." + raise ValueError(msg) from e except Exception as e: - raise ValueError(f"Error loading documents: {e}") from e + msg = f"Error loading documents: {e}" + raise ValueError(msg) from e assert len(docs) == 1, "Expected a single document to be loaded." 
diff --git a/src/backend/base/langflow/components/data/URL.py b/src/backend/base/langflow/components/data/URL.py index 66efb0ad7977..168a46d03804 100644 --- a/src/backend/base/langflow/components/data/URL.py +++ b/src/backend/base/langflow/components/data/URL.py @@ -65,7 +65,8 @@ def ensure_url(self, string: str) -> str: ) if not url_regex.match(string): - raise ValueError(f"Invalid URL: {string}") + msg = f"Invalid URL: {string}" + raise ValueError(msg) return string diff --git a/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py b/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py index a1cff2aed5fd..5199e6961731 100644 --- a/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py +++ b/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py @@ -125,9 +125,8 @@ def build_model(self) -> LanguageModel: # type: ignore[type-var] litellm.drop_params = True litellm.set_verbose = self.verbose except ImportError: - raise ChatLiteLLMException( - "Could not import litellm python package. " "Please install it with `pip install litellm`" - ) + msg = "Could not import litellm python package. " "Please install it with `pip install litellm`" + raise ChatLiteLLMException(msg) # Remove empty keys if "" in self.kwargs: del self.kwargs[""] @@ -136,9 +135,11 @@ def build_model(self) -> LanguageModel: # type: ignore[type-var] # Report missing fields for Azure provider if self.provider == "Azure": if "api_base" not in self.kwargs: - raise Exception("Missing api_base on kwargs") + msg = "Missing api_base on kwargs" + raise Exception(msg) if "api_version" not in self.model_kwargs: - raise Exception("Missing api_version on model_kwargs") + msg = "Missing api_version on model_kwargs" + raise Exception(msg) output = ChatLiteLLM( model=f"{self.provider.lower()}/{self.model}", client=None, diff --git a/src/backend/base/langflow/components/deactivated/ExtractKeyFromData.py b/src/backend/base/langflow/components/deactivated/ExtractKeyFromData.py index 01f7abd008c5..0dbb80c42b38 100644 --- a/src/backend/base/langflow/components/deactivated/ExtractKeyFromData.py +++ b/src/backend/base/langflow/components/deactivated/ExtractKeyFromData.py @@ -40,7 +40,8 @@ def build(self, data: Data, keys: list[str], silent_error: bool = True) -> Data: extracted_keys[key] = getattr(data, key) except AttributeError: if not silent_error: - raise KeyError(f"The key '{key}' does not exist in the data.") + msg = f"The key '{key}' does not exist in the data." + raise KeyError(msg) return_data = Data(data=extracted_keys) self.status = return_data return return_data diff --git a/src/backend/base/langflow/components/deactivated/SubFlow.py b/src/backend/base/langflow/components/deactivated/SubFlow.py index 763840deb093..74d162ca87ab 100644 --- a/src/backend/base/langflow/components/deactivated/SubFlow.py +++ b/src/backend/base/langflow/components/deactivated/SubFlow.py @@ -45,7 +45,8 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam try: flow_data = self.get_flow(field_value) if not flow_data: - raise ValueError(f"Flow {field_value} not found.") + msg = f"Flow {field_value} not found." 
+ raise ValueError(msg) graph = Graph.from_payload(flow_data.data["data"]) # Get all inputs from the graph inputs = get_flow_inputs(graph) diff --git a/src/backend/base/langflow/components/documentloaders/AssemblyAILeMUR.py b/src/backend/base/langflow/components/documentloaders/AssemblyAILeMUR.py index 0400dbd9f452..4bfed6733b99 100644 --- a/src/backend/base/langflow/components/documentloaders/AssemblyAILeMUR.py +++ b/src/backend/base/langflow/components/documentloaders/AssemblyAILeMUR.py @@ -164,7 +164,8 @@ def perform_lemur_action(self, transcript_group: aai.TranscriptGroup, endpoint: max_output_size=self.max_output_size, ) else: - raise ValueError(f"Endpoint not supported: {endpoint}") + msg = f"Endpoint not supported: {endpoint}" + raise ValueError(msg) return result.dict() @@ -178,4 +179,5 @@ def get_final_model(self, model_name: str) -> aai.LemurModel: elif model_name == "claude3_sonnet": return aai.LemurModel.claude3_sonnet else: - raise ValueError(f"Model name not supported: {model_name}") + msg = f"Model name not supported: {model_name}" + raise ValueError(msg) diff --git a/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py index 9e7d319a9d50..d5c742c02bd7 100644 --- a/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py @@ -77,6 +77,7 @@ def build_embeddings(self) -> Embeddings: dimensions=self.dimensions or None, ) except Exception as e: - raise ValueError(f"Could not connect to AzureOpenAIEmbeddings API: {str(e)}") from e + msg = f"Could not connect to AzureOpenAIEmbeddings API: {str(e)}" + raise ValueError(msg) from e return embeddings diff --git a/src/backend/base/langflow/components/embeddings/GoogleGenerativeAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/GoogleGenerativeAIEmbeddings.py index 3476ef35bcdd..7236e0433ad2 100644 --- a/src/backend/base/langflow/components/embeddings/GoogleGenerativeAIEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/GoogleGenerativeAIEmbeddings.py @@ -32,7 +32,8 @@ class GoogleGenerativeAIEmbeddingsComponent(Component): def build_embeddings(self) -> Embeddings: if not self.api_key: - raise ValueError("API Key is required") + msg = "API Key is required" + raise ValueError(msg) class HotaGoogleGenerativeAIEmbeddings(GoogleGenerativeAIEmbeddings): def __init__(self, *args, **kwargs): @@ -85,7 +86,8 @@ def embed_documents( BatchEmbedContentsRequest(requests=requests, model=self.model) ) except Exception as e: - raise GoogleGenerativeAIError(f"Error embedding content: {e}") from e + msg = f"Error embedding content: {e}" + raise GoogleGenerativeAIError(msg) from e embeddings.extend([list(np.pad(e.values, (0, 768), "constant")) for e in result.embeddings]) return embeddings diff --git a/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py b/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py index 012c8ba497bf..ccc7bec68bb4 100644 --- a/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py @@ -46,22 +46,25 @@ class HuggingFaceInferenceAPIEmbeddingsComponent(LCEmbeddingsModel): def validate_inference_endpoint(self, inference_endpoint: str) -> bool: parsed_url = urlparse(inference_endpoint) if not all([parsed_url.scheme, parsed_url.netloc]): 
- raise ValueError( + msg = ( f"Invalid inference endpoint format: '{self.inference_endpoint}'. " "Please ensure the URL includes both a scheme (e.g., 'http://' or 'https://') and a domain name. " "Example: 'http://localhost:8080' or 'https://api.example.com'" ) + raise ValueError(msg) try: response = requests.get(f"{inference_endpoint}/health", timeout=5) except requests.RequestException: - raise ValueError( + msg = ( f"Inference endpoint '{inference_endpoint}' is not responding. " "Please ensure the URL is correct and the service is running." ) + raise ValueError(msg) if response.status_code != 200: - raise ValueError(f"HuggingFace health check failed: {response.status_code}") + msg = f"HuggingFace health check failed: {response.status_code}" + raise ValueError(msg) # returning True to solve linting error return True @@ -86,11 +89,13 @@ def build_embeddings(self) -> Embeddings: self.validate_inference_endpoint(api_url) api_key = SecretStr("DummyAPIKeyForLocalDeployment") elif not self.api_key: - raise ValueError("API Key is required for non-local inference endpoints") + msg = "API Key is required for non-local inference endpoints" + raise ValueError(msg) else: api_key = SecretStr(self.api_key) try: return self.create_huggingface_embeddings(api_key, api_url, self.model_name) except Exception as e: - raise ValueError("Could not connect to HuggingFace Inference API.") from e + msg = "Could not connect to HuggingFace Inference API." + raise ValueError(msg) from e diff --git a/src/backend/base/langflow/components/embeddings/MistalAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/MistalAIEmbeddings.py index f46a81f96ef9..ab71f873695a 100644 --- a/src/backend/base/langflow/components/embeddings/MistalAIEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/MistalAIEmbeddings.py @@ -43,7 +43,8 @@ class MistralAIEmbeddingsComponent(LCModelComponent): def build_embeddings(self) -> Embeddings: if not self.mistral_api_key: - raise ValueError("Mistral API Key is required") + msg = "Mistral API Key is required" + raise ValueError(msg) api_key = SecretStr(self.mistral_api_key) diff --git a/src/backend/base/langflow/components/embeddings/NVIDIAEmbeddings.py b/src/backend/base/langflow/components/embeddings/NVIDIAEmbeddings.py index 31a3450d8aa4..4090692e80b2 100644 --- a/src/backend/base/langflow/components/embeddings/NVIDIAEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/NVIDIAEmbeddings.py @@ -51,14 +51,16 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam build_config["model"]["options"] = ids build_config["model"]["value"] = ids[0] except Exception as e: - raise ValueError(f"Error getting model names: {e}") + msg = f"Error getting model names: {e}" + raise ValueError(msg) return build_config def build_embeddings(self) -> Embeddings: try: from langchain_nvidia_ai_endpoints import NVIDIAEmbeddings except ImportError: - raise ImportError("Please install langchain-nvidia-ai-endpoints to use the Nvidia model.") + msg = "Please install langchain-nvidia-ai-endpoints to use the Nvidia model." + raise ImportError(msg) try: output = NVIDIAEmbeddings( model=self.model, @@ -67,5 +69,6 @@ def build_embeddings(self) -> Embeddings: nvidia_api_key=self.nvidia_api_key, ) # type: ignore except Exception as e: - raise ValueError(f"Could not connect to NVIDIA API. Error: {e}") from e + msg = f"Could not connect to NVIDIA API. 
Error: {e}" + raise ValueError(msg) from e return output diff --git a/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py b/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py index 07ae73a533f1..104a732df3a9 100644 --- a/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py @@ -43,5 +43,6 @@ def build_embeddings(self) -> Embeddings: temperature=self.temperature, ) # type: ignore except Exception as e: - raise ValueError("Could not connect to Ollama API.") from e + msg = "Could not connect to Ollama API." + raise ValueError(msg) from e return output diff --git a/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py index 5628203d8dbc..a0754e938ca3 100644 --- a/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py @@ -39,9 +39,8 @@ def build_embeddings(self) -> Embeddings: try: from langchain_google_vertexai import VertexAIEmbeddings except ImportError: - raise ImportError( - "Please install the langchain-google-vertexai package to use the VertexAIEmbeddings component." - ) + msg = "Please install the langchain-google-vertexai package to use the VertexAIEmbeddings component." + raise ImportError(msg) from google.oauth2 import service_account diff --git a/src/backend/base/langflow/components/helpers/ParseJSONData.py b/src/backend/base/langflow/components/helpers/ParseJSONData.py index 97ad1b4978dd..f6cf2ec64ad9 100644 --- a/src/backend/base/langflow/components/helpers/ParseJSONData.py +++ b/src/backend/base/langflow/components/helpers/ParseJSONData.py @@ -62,7 +62,8 @@ def filter_data(self) -> list[Data]: try: to_filter_as_dict.append(json.loads(repair_json(f))) except JSONDecodeError as e: - raise ValueError(f"Invalid JSON: {e}") + msg = f"Invalid JSON: {e}" + raise ValueError(msg) full_filter_str = json.dumps(to_filter_as_dict) diff --git a/src/backend/base/langflow/components/langchain_utilities/FirecrawlCrawlApi.py b/src/backend/base/langflow/components/langchain_utilities/FirecrawlCrawlApi.py index 0db626bf6af5..3c7344d8dbec 100644 --- a/src/backend/base/langflow/components/langchain_utilities/FirecrawlCrawlApi.py +++ b/src/backend/base/langflow/components/langchain_utilities/FirecrawlCrawlApi.py @@ -57,9 +57,10 @@ def build( try: from firecrawl.firecrawl import FirecrawlApp # type: ignore except ImportError: - raise ImportError( + msg = ( "Could not import firecrawl integration package. " "Please install it with `pip install firecrawl-py`." ) + raise ImportError(msg) if crawlerOptions: crawler_options_dict = crawlerOptions.__dict__["data"]["text"] else: diff --git a/src/backend/base/langflow/components/langchain_utilities/FirecrawlScrapeApi.py b/src/backend/base/langflow/components/langchain_utilities/FirecrawlScrapeApi.py index 76f8230706c2..47de85fdd44d 100644 --- a/src/backend/base/langflow/components/langchain_utilities/FirecrawlScrapeApi.py +++ b/src/backend/base/langflow/components/langchain_utilities/FirecrawlScrapeApi.py @@ -50,9 +50,10 @@ def build( try: from firecrawl.firecrawl import FirecrawlApp # type: ignore except ImportError: - raise ImportError( + msg = ( "Could not import firecrawl integration package. " "Please install it with `pip install firecrawl-py`." 
) + raise ImportError(msg) if extractorOptions: extractor_options_dict = extractorOptions.__dict__["data"]["text"] else: diff --git a/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py b/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py index e4d3300641d7..f4adb38e1db0 100644 --- a/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py +++ b/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py @@ -43,6 +43,7 @@ def build( elif isinstance(document, Document): documents = Document(page_content=orjson_dumps({key: document.page_content}, indent_2=False)) else: - raise TypeError(f"Expected Document or list of Documents, got {type(document)}") + msg = f"Expected Document or list of Documents, got {type(document)}" + raise TypeError(msg) self.repr_value = documents return documents diff --git a/src/backend/base/langflow/components/langchain_utilities/SpiderTool.py b/src/backend/base/langflow/components/langchain_utilities/SpiderTool.py index 0cef340af328..a0fd38fb260b 100644 --- a/src/backend/base/langflow/components/langchain_utilities/SpiderTool.py +++ b/src/backend/base/langflow/components/langchain_utilities/SpiderTool.py @@ -110,9 +110,11 @@ def crawl(self) -> list[Data]: elif self.mode == "crawl": result = app.crawl_url(self.url, parameters) else: - raise ValueError(f"Invalid mode: {self.mode}. Must be 'scrape' or 'crawl'.") + msg = f"Invalid mode: {self.mode}. Must be 'scrape' or 'crawl'." + raise ValueError(msg) except Exception as e: - raise Exception(f"Error: {str(e)}") + msg = f"Error: {str(e)}" + raise Exception(msg) records = [] diff --git a/src/backend/base/langflow/components/memories/AstraDBChatMemory.py b/src/backend/base/langflow/components/memories/AstraDBChatMemory.py index 3141a52e621a..ab1544e39bce 100644 --- a/src/backend/base/langflow/components/memories/AstraDBChatMemory.py +++ b/src/backend/base/langflow/components/memories/AstraDBChatMemory.py @@ -53,10 +53,11 @@ def build_message_history(self) -> BaseChatMessageHistory: try: from langchain_astradb.chat_message_histories import AstraDBChatMessageHistory except ImportError: - raise ImportError( + msg = ( "Could not import langchain Astra DB integration package. " "Please install it with `pip install langchain-astradb`." ) + raise ImportError(msg) memory = AstraDBChatMessageHistory( session_id=self.session_id, diff --git a/src/backend/base/langflow/components/memories/CassandraChatMemory.py b/src/backend/base/langflow/components/memories/CassandraChatMemory.py index 84cb7a46ee1f..e87d83b942f9 100644 --- a/src/backend/base/langflow/components/memories/CassandraChatMemory.py +++ b/src/backend/base/langflow/components/memories/CassandraChatMemory.py @@ -55,9 +55,8 @@ def build_message_history(self) -> BaseChatMessageHistory: try: import cassio except ImportError: - raise ImportError( - "Could not import cassio integration package. " "Please install it with `pip install cassio`." - ) + msg = "Could not import cassio integration package. " "Please install it with `pip install cassio`." 
+ raise ImportError(msg) from uuid import UUID diff --git a/src/backend/base/langflow/components/memories/ZepChatMemory.py b/src/backend/base/langflow/components/memories/ZepChatMemory.py index e18c6d876bb2..1914b6e2b4e7 100644 --- a/src/backend/base/langflow/components/memories/ZepChatMemory.py +++ b/src/backend/base/langflow/components/memories/ZepChatMemory.py @@ -35,9 +35,8 @@ def build_message_history(self) -> BaseChatMessageHistory: zep_python.zep_client.API_BASE_PATH = self.api_base_path except ImportError: - raise ImportError( - "Could not import zep-python package. " "Please install it with `pip install zep-python`." - ) + msg = "Could not import zep-python package. " "Please install it with `pip install zep-python`." + raise ImportError(msg) zep_client = ZepClient(api_url=self.url, api_key=self.api_key) return ZepChatMessageHistory(session_id=self.session_id, zep_client=zep_client) diff --git a/src/backend/base/langflow/components/models/AmazonBedrockModel.py b/src/backend/base/langflow/components/models/AmazonBedrockModel.py index 955fd860983f..4d4f128d80a0 100644 --- a/src/backend/base/langflow/components/models/AmazonBedrockModel.py +++ b/src/backend/base/langflow/components/models/AmazonBedrockModel.py @@ -69,7 +69,8 @@ def build_model(self) -> LanguageModel: # type: ignore[type-var] try: from langchain_aws import ChatBedrock except ImportError: - raise ImportError("langchain_aws is not installed. Please install it with `pip install langchain_aws`.") + msg = "langchain_aws is not installed. Please install it with `pip install langchain_aws`." + raise ImportError(msg) if self.aws_access_key: import boto3 # type: ignore @@ -103,5 +104,6 @@ def build_model(self) -> LanguageModel: # type: ignore[type-var] streaming=self.stream, ) except Exception as e: - raise ValueError("Could not connect to AmazonBedrock API.") from e + msg = "Could not connect to AmazonBedrock API." + raise ValueError(msg) from e return output # type: ignore diff --git a/src/backend/base/langflow/components/models/AnthropicModel.py b/src/backend/base/langflow/components/models/AnthropicModel.py index ba8a9fe7e4c4..c7a6cbf85f3b 100644 --- a/src/backend/base/langflow/components/models/AnthropicModel.py +++ b/src/backend/base/langflow/components/models/AnthropicModel.py @@ -63,9 +63,8 @@ def build_model(self) -> LanguageModel: # type: ignore[type-var] try: from langchain_anthropic.chat_models import ChatAnthropic except ImportError: - raise ImportError( - "langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`." - ) + msg = "langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`." + raise ImportError(msg) model = self.model anthropic_api_key = self.anthropic_api_key max_tokens = self.max_tokens @@ -82,7 +81,8 @@ def build_model(self) -> LanguageModel: # type: ignore[type-var] streaming=self.stream, ) except Exception as e: - raise ValueError("Could not connect to Anthropic API.") from e + msg = "Could not connect to Anthropic API." 
+ raise ValueError(msg) from e return output # type: ignore diff --git a/src/backend/base/langflow/components/models/AzureOpenAIModel.py b/src/backend/base/langflow/components/models/AzureOpenAIModel.py index a2455b6518e6..a5ff32d57c44 100644 --- a/src/backend/base/langflow/components/models/AzureOpenAIModel.py +++ b/src/backend/base/langflow/components/models/AzureOpenAIModel.py @@ -78,6 +78,7 @@ def build_model(self) -> LanguageModel: # type: ignore[type-var] streaming=stream, ) except Exception as e: - raise ValueError(f"Could not connect to AzureOpenAI API: {str(e)}") from e + msg = f"Could not connect to AzureOpenAI API: {str(e)}" + raise ValueError(msg) from e return output # type: ignore diff --git a/src/backend/base/langflow/components/models/BaiduQianfanChatModel.py b/src/backend/base/langflow/components/models/BaiduQianfanChatModel.py index 8fc27e33480a..8003f5ca4faf 100644 --- a/src/backend/base/langflow/components/models/BaiduQianfanChatModel.py +++ b/src/backend/base/langflow/components/models/BaiduQianfanChatModel.py @@ -97,6 +97,7 @@ def build_model(self) -> LanguageModel: # type: ignore[type-var] endpoint=endpoint, ) except Exception as e: - raise ValueError("Could not connect to Baidu Qianfan API.") from e + msg = "Could not connect to Baidu Qianfan API." + raise ValueError(msg) from e return output # type: ignore diff --git a/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py b/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py index f5a02d555a0f..02b5b839ee3e 100644 --- a/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py +++ b/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py @@ -63,7 +63,8 @@ def build_model(self) -> LanguageModel: # type: ignore[type-var] try: from langchain_google_genai import ChatGoogleGenerativeAI except ImportError: - raise ImportError("The 'langchain_google_genai' package is required to use the Google Generative AI model.") + msg = "The 'langchain_google_genai' package is required to use the Google Generative AI model." + raise ImportError(msg) google_api_key = self.google_api_key model = self.model diff --git a/src/backend/base/langflow/components/models/HuggingFaceModel.py b/src/backend/base/langflow/components/models/HuggingFaceModel.py index 06abac6f4928..44a9b642c4be 100644 --- a/src/backend/base/langflow/components/models/HuggingFaceModel.py +++ b/src/backend/base/langflow/components/models/HuggingFaceModel.py @@ -113,6 +113,7 @@ def build_model(self) -> LanguageModel: repetition_penalty=repetition_penalty, ) except Exception as e: - raise ValueError("Could not connect to HuggingFace Endpoints API.") from e + msg = "Could not connect to HuggingFace Endpoints API." 
+ raise ValueError(msg) from e return llm diff --git a/src/backend/base/langflow/components/models/NvidiaModel.py b/src/backend/base/langflow/components/models/NvidiaModel.py index 16f809c9667c..7a5387c3210d 100644 --- a/src/backend/base/langflow/components/models/NvidiaModel.py +++ b/src/backend/base/langflow/components/models/NvidiaModel.py @@ -65,14 +65,16 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam build_config["model_name"]["options"] = ids build_config["model_name"]["value"] = ids[0] except Exception as e: - raise ValueError(f"Error getting model names: {e}") + msg = f"Error getting model names: {e}" + raise ValueError(msg) return build_config def build_model(self) -> LanguageModel: # type: ignore[type-var] try: from langchain_nvidia_ai_endpoints import ChatNVIDIA except ImportError: - raise ImportError("Please install langchain-nvidia-ai-endpoints to use the NVIDIA model.") + msg = "Please install langchain-nvidia-ai-endpoints to use the NVIDIA model." + raise ImportError(msg) nvidia_api_key = self.nvidia_api_key temperature = self.temperature model_name: str = self.model_name diff --git a/src/backend/base/langflow/components/models/OllamaModel.py b/src/backend/base/langflow/components/models/OllamaModel.py index fbfa6b2d12fb..8b5dddb610bd 100644 --- a/src/backend/base/langflow/components/models/OllamaModel.py +++ b/src/backend/base/langflow/components/models/OllamaModel.py @@ -67,7 +67,8 @@ def get_model(self, base_url_value: str) -> list[str]: model_names = [model["name"] for model in data.get("models", [])] return model_names except Exception as e: - raise ValueError("Could not retrieve models. Please, make sure Ollama is running.") from e + msg = "Could not retrieve models. Please, make sure Ollama is running." + raise ValueError(msg) from e inputs = LCModelComponent._base_inputs + [ StrInput( @@ -261,6 +262,7 @@ def build_model(self) -> LanguageModel: # type: ignore[type-var] try: output = ChatOllama(**llm_params) # type: ignore except Exception as e: - raise ValueError("Could not initialize Ollama LLM.") from e + msg = "Could not initialize Ollama LLM." + raise ValueError(msg) from e return output # type: ignore diff --git a/src/backend/base/langflow/components/models/VertexAiModel.py b/src/backend/base/langflow/components/models/VertexAiModel.py index 0c42caff44dc..7fff6ee708c0 100644 --- a/src/backend/base/langflow/components/models/VertexAiModel.py +++ b/src/backend/base/langflow/components/models/VertexAiModel.py @@ -42,9 +42,8 @@ def build_model(self) -> LanguageModel: try: from langchain_google_vertexai import ChatVertexAI except ImportError: - raise ImportError( - "Please install the langchain-google-vertexai package to use the VertexAIEmbeddings component." - ) + msg = "Please install the langchain-google-vertexai package to use the VertexAIEmbeddings component." 
+ raise ImportError(msg) location = self.location or None if self.credentials: from google.cloud import aiplatform diff --git a/src/backend/base/langflow/components/prompts/LangChainHubPrompt.py b/src/backend/base/langflow/components/prompts/LangChainHubPrompt.py index df90865c723a..dc2265a6264d 100644 --- a/src/backend/base/langflow/components/prompts/LangChainHubPrompt.py +++ b/src/backend/base/langflow/components/prompts/LangChainHubPrompt.py @@ -110,7 +110,8 @@ def _fetch_langchain_hub_template(self): # Check if the api key is provided if not self.langchain_api_key: - raise ValueError("Please provide a LangChain API Key") + msg = "Please provide a LangChain API Key" + raise ValueError(msg) # Pull the prompt from LangChain Hub prompt_data = langchain.hub.pull(self.langchain_hub_prompt, api_key=self.langchain_api_key) diff --git a/src/backend/base/langflow/components/prototypes/CreateData.py b/src/backend/base/langflow/components/prototypes/CreateData.py index 8c5c3d54db13..aa48cfe1cdb3 100644 --- a/src/backend/base/langflow/components/prototypes/CreateData.py +++ b/src/backend/base/langflow/components/prototypes/CreateData.py @@ -50,7 +50,8 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam existing_fields = {} if field_value_int > 15: build_config["number_of_fields"]["value"] = 15 - raise ValueError("Number of fields cannot exceed 15. Try using a Component to combine two Data.") + msg = "Number of fields cannot exceed 15. Try using a Component to combine two Data." + raise ValueError(msg) if len(build_config) > len(default_keys): # back up the existing template fields for key in build_config.copy(): @@ -99,4 +100,5 @@ def validate_text_key(self): data_keys = self.get_data().keys() if self.text_key not in data_keys and self.text_key != "": formatted_data_keys = ", ".join(data_keys) - raise ValueError(f"Text Key: '{self.text_key}' not found in the Data keys: '{formatted_data_keys}'") + msg = f"Text Key: '{self.text_key}' not found in the Data keys: '{formatted_data_keys}'" + raise ValueError(msg) diff --git a/src/backend/base/langflow/components/prototypes/FlowTool.py b/src/backend/base/langflow/components/prototypes/FlowTool.py index 718330fedfe8..30b950965ff6 100644 --- a/src/backend/base/langflow/components/prototypes/FlowTool.py +++ b/src/backend/base/langflow/components/prototypes/FlowTool.py @@ -74,11 +74,13 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam def build_tool(self) -> Tool: FlowTool.update_forward_refs() if "flow_name" not in self._attributes or not self._attributes["flow_name"]: - raise ValueError("Flow name is required") + msg = "Flow name is required" + raise ValueError(msg) flow_name = self._attributes["flow_name"] flow_data = self.get_flow(flow_name) if not flow_data: - raise ValueError("Flow not found.") + msg = "Flow not found." + raise ValueError(msg) graph = Graph.from_payload(flow_data.data["data"]) try: graph.set_run_id(self.graph.run_id) diff --git a/src/backend/base/langflow/components/prototypes/JSONCleaner.py b/src/backend/base/langflow/components/prototypes/JSONCleaner.py index 9c3939843aba..f51379177e10 100644 --- a/src/backend/base/langflow/components/prototypes/JSONCleaner.py +++ b/src/backend/base/langflow/components/prototypes/JSONCleaner.py @@ -48,9 +48,8 @@ def clean_json(self) -> Message: try: from json_repair import repair_json # type: ignore except ImportError: - raise ImportError( - "Could not import the json_repair package." "Please install it with `pip install json_repair`." 
-            )
+            msg = "Could not import the json_repair package." "Please install it with `pip install json_repair`."
+            raise ImportError(msg)
 
         """Clean the input JSON string based on provided options and return the cleaned JSON string."""
         json_str = self.json_str
@@ -62,7 +61,8 @@ def clean_json(self) -> Message:
             start = json_str.find("{")
             end = json_str.rfind("}")
             if start == -1 or end == -1:
-                raise ValueError("Invalid JSON string: Missing '{' or '}'")
+                msg = "Invalid JSON string: Missing '{' or '}'"
+                raise ValueError(msg)
             json_str = json_str[start : end + 1]
 
             if remove_control_chars:
@@ -78,7 +78,8 @@ def clean_json(self) -> Message:
             self.status = result
             return Message(text=result)
         except Exception as e:
-            raise ValueError(f"Error cleaning JSON string: {str(e)}")
+            msg = f"Error cleaning JSON string: {str(e)}"
+            raise ValueError(msg)
 
     def _remove_control_characters(self, s: str) -> str:
         """Remove control characters from the string."""
@@ -94,4 +95,5 @@ def _validate_json(self, s: str) -> str:
             json.loads(s)
             return s
         except json.JSONDecodeError as e:
-            raise ValueError(f"Invalid JSON string: {str(e)}")
+            msg = f"Invalid JSON string: {str(e)}"
+            raise ValueError(msg)
diff --git a/src/backend/base/langflow/components/prototypes/RunFlow.py b/src/backend/base/langflow/components/prototypes/RunFlow.py
index 56e5f74dbd3d..834a7b20f4de 100644
--- a/src/backend/base/langflow/components/prototypes/RunFlow.py
+++ b/src/backend/base/langflow/components/prototypes/RunFlow.py
@@ -49,7 +49,8 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam
 
     async def generate_results(self) -> list[Data]:
         if "flow_name" not in self._attributes or not self._attributes["flow_name"]:
-            raise ValueError("Flow name is required")
+            msg = "Flow name is required"
+            raise ValueError(msg)
         flow_name = self._attributes["flow_name"]
 
         results: list[RunOutputs | None] = await self.run_flow(
diff --git a/src/backend/base/langflow/components/prototypes/RunnableExecutor.py b/src/backend/base/langflow/components/prototypes/RunnableExecutor.py
index c4583b6b0303..db94363c9d07 100644
--- a/src/backend/base/langflow/components/prototypes/RunnableExecutor.py
+++ b/src/backend/base/langflow/components/prototypes/RunnableExecutor.py
@@ -118,7 +118,8 @@ def get_input_dict(self, runnable, input_key, input_value):
     async def build_executor(self) -> Message:
         input_dict, status = self.get_input_dict(self.runnable, self.input_key, self.input_value)
         if not isinstance(self.runnable, AgentExecutor):
-            raise ValueError("The runnable must be an AgentExecutor")
+            msg = "The runnable must be an AgentExecutor"
+            raise ValueError(msg)
 
         if self.use_stream:
             return self.astream_events(input_dict)
diff --git a/src/backend/base/langflow/components/prototypes/SQLExecutor.py b/src/backend/base/langflow/components/prototypes/SQLExecutor.py
index cafc92fb4095..4c86fe276fdf 100644
--- a/src/backend/base/langflow/components/prototypes/SQLExecutor.py
+++ b/src/backend/base/langflow/components/prototypes/SQLExecutor.py
@@ -48,7 +48,8 @@ def build(
         try:
             database = SQLDatabase.from_uri(database_url)
         except Exception as e:
-            raise ValueError(f"An error occurred while connecting to the database: {e}")
+            msg = f"An error occurred while connecting to the database: {e}"
+            raise ValueError(msg)
         try:
             tool = QuerySQLDataBaseTool(db=database)
             result = tool.run(query, include_columns=include_columns)
diff --git a/src/backend/base/langflow/components/prototypes/SelectData.py b/src/backend/base/langflow/components/prototypes/SelectData.py
index 0724df3b5019..3d17d9103eed 100644
--- a/src/backend/base/langflow/components/prototypes/SelectData.py
+++ b/src/backend/base/langflow/components/prototypes/SelectData.py
@@ -38,7 +38,8 @@ async def select_data(self) -> Data:
 
         # Validate that the selected index is within bounds
         if selected_index < 0 or selected_index >= len(self.data_list):
-            raise ValueError(f"Selected index {selected_index} is out of range.")
+            msg = f"Selected index {selected_index} is out of range."
+            raise ValueError(msg)
 
         # Return the selected Data object
         selected_data = self.data_list[selected_index]
diff --git a/src/backend/base/langflow/components/prototypes/SubFlow.py b/src/backend/base/langflow/components/prototypes/SubFlow.py
index 728622f07f0d..312c9803480f 100644
--- a/src/backend/base/langflow/components/prototypes/SubFlow.py
+++ b/src/backend/base/langflow/components/prototypes/SubFlow.py
@@ -39,7 +39,8 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam
         try:
             flow_data = self.get_flow(field_value)
             if not flow_data:
-                raise ValueError(f"Flow {field_value} not found.")
+                msg = f"Flow {field_value} not found."
+                raise ValueError(msg)
             graph = Graph.from_payload(flow_data.data["data"])
             # Get all inputs from the graph
             inputs = get_flow_inputs(graph)
diff --git a/src/backend/base/langflow/components/prototypes/UpdateData.py b/src/backend/base/langflow/components/prototypes/UpdateData.py
index d9de27e49368..7f0d915ffe89 100644
--- a/src/backend/base/langflow/components/prototypes/UpdateData.py
+++ b/src/backend/base/langflow/components/prototypes/UpdateData.py
@@ -56,7 +56,8 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam
             existing_fields = {}
             if field_value_int > 15:
                 build_config["number_of_fields"]["value"] = 15
-                raise ValueError("Number of fields cannot exceed 15. Try using a Component to combine two Data.")
+                msg = "Number of fields cannot exceed 15. Try using a Component to combine two Data."
+                raise ValueError(msg)
             if len(build_config) > len(default_keys):
                 # back up the existing template fields
                 for key in build_config.copy():
@@ -105,4 +106,5 @@ def validate_text_key(self, data: Data):
         """This function validates that the Text Key is one of the keys in the Data"""
         data_keys = data.data.keys()
         if self.text_key not in data_keys and self.text_key != "":
-            raise ValueError(f"Text Key: {self.text_key} not found in the Data keys: {','.join(data_keys)}")
+            msg = f"Text Key: {self.text_key} not found in the Data keys: {','.join(data_keys)}"
+            raise ValueError(msg)
diff --git a/src/backend/base/langflow/components/retrievers/AmazonKendra.py b/src/backend/base/langflow/components/retrievers/AmazonKendra.py
index 86f9b6c9c671..ce98c792f625 100644
--- a/src/backend/base/langflow/components/retrievers/AmazonKendra.py
+++ b/src/backend/base/langflow/components/retrievers/AmazonKendra.py
@@ -48,5 +48,6 @@ def build(
                 user_context=user_context,
             )  # type: ignore
         except Exception as e:
-            raise ValueError("Could not connect to AmazonKendra API.") from e
+            msg = "Could not connect to AmazonKendra API."
+ raise ValueError(msg) from e return cast(Retriever, output) diff --git a/src/backend/base/langflow/components/retrievers/CohereRerank.py b/src/backend/base/langflow/components/retrievers/CohereRerank.py index 059229130050..a15eda2c8b52 100644 --- a/src/backend/base/langflow/components/retrievers/CohereRerank.py +++ b/src/backend/base/langflow/components/retrievers/CohereRerank.py @@ -82,4 +82,5 @@ async def search_documents(self) -> list[Data]: # type: ignore @check_cached_vector_store def build_vector_store(self) -> VectorStore: - raise NotImplementedError("Cohere Rerank does not support vector stores.") + msg = "Cohere Rerank does not support vector stores." + raise NotImplementedError(msg) diff --git a/src/backend/base/langflow/components/retrievers/MetalRetriever.py b/src/backend/base/langflow/components/retrievers/MetalRetriever.py index 30e9876f7461..d30aef4aaf21 100644 --- a/src/backend/base/langflow/components/retrievers/MetalRetriever.py +++ b/src/backend/base/langflow/components/retrievers/MetalRetriever.py @@ -25,5 +25,6 @@ def build(self, api_key: str, client_id: str, index_id: str, params: dict | None try: metal = Metal(api_key=api_key, client_id=client_id, index_id=index_id) except Exception as e: - raise ValueError("Could not connect to Metal API.") from e + msg = "Could not connect to Metal API." + raise ValueError(msg) from e return cast(Retriever, MetalRetriever(client=metal, params=params or {})) diff --git a/src/backend/base/langflow/components/retrievers/NvidiaRerank.py b/src/backend/base/langflow/components/retrievers/NvidiaRerank.py index 04cd964d60df..e699a036b4f8 100644 --- a/src/backend/base/langflow/components/retrievers/NvidiaRerank.py +++ b/src/backend/base/langflow/components/retrievers/NvidiaRerank.py @@ -55,14 +55,16 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam build_config["model"]["options"] = ids build_config["model"]["value"] = ids[0] except Exception as e: - raise ValueError(f"Error getting model names: {e}") + msg = f"Error getting model names: {e}" + raise ValueError(msg) return build_config def build_model(self): try: from langchain_nvidia_ai_endpoints import NVIDIARerank except ImportError: - raise ImportError("Please install langchain-nvidia-ai-endpoints to use the NVIDIA model.") + msg = "Please install langchain-nvidia-ai-endpoints to use the NVIDIA model." + raise ImportError(msg) return NVIDIARerank(api_key=self.api_key, model=self.model, base_url=self.base_url) def build_base_retriever(self) -> Retriever: # type: ignore[type-var] @@ -79,4 +81,5 @@ async def search_documents(self) -> list[Data]: # type: ignore @check_cached_vector_store def build_vector_store(self) -> VectorStore: - raise NotImplementedError("NVIDIA Rerank does not support vector stores.") + msg = "NVIDIA Rerank does not support vector stores." + raise NotImplementedError(msg) diff --git a/src/backend/base/langflow/components/retrievers/SelfQueryRetriever.py b/src/backend/base/langflow/components/retrievers/SelfQueryRetriever.py index 859b20f6cc90..a5c0df16b758 100644 --- a/src/backend/base/langflow/components/retrievers/SelfQueryRetriever.py +++ b/src/backend/base/langflow/components/retrievers/SelfQueryRetriever.py @@ -66,7 +66,8 @@ def retrieve_documents(self) -> list[Data]: elif isinstance(self.query, str): input_text = self.query else: - raise ValueError(f"Query type {type(self.query)} not supported.") + msg = f"Query type {type(self.query)} not supported." 
+ raise ValueError(msg) documents = self_query_retriever.invoke(input=input_text, config={"callbacks": self.get_langchain_callbacks()}) data = [Data.from_document(document) for document in documents] diff --git a/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py b/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py index 10b85c9070de..86733e2d5852 100644 --- a/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py +++ b/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py @@ -51,7 +51,8 @@ def build( for meta in metadata_field_info: meta_obj = json.loads(meta) if "name" not in meta_obj or "description" not in meta_obj or "type" not in meta_obj: - raise Exception("Incorrect metadata field info format.") + msg = "Incorrect metadata field info format." + raise Exception(msg) attribute_info = AttributeInfo( name=meta_obj["name"], description=meta_obj["description"], diff --git a/src/backend/base/langflow/components/tools/GleanSearchAPI.py b/src/backend/base/langflow/components/tools/GleanSearchAPI.py index db15b42f7b76..8408e8503477 100644 --- a/src/backend/base/langflow/components/tools/GleanSearchAPI.py +++ b/src/backend/base/langflow/components/tools/GleanSearchAPI.py @@ -67,7 +67,8 @@ def results(self, query: str, **kwargs: Any) -> list[dict[str, Any]]: results = self._search_api_results(query, **kwargs) if len(results) == 0: - raise AssertionError("No good Glean Search Result was found") + msg = "No good Glean Search Result was found" + raise AssertionError(msg) return results diff --git a/src/backend/base/langflow/components/tools/GoogleSearchAPI.py b/src/backend/base/langflow/components/tools/GoogleSearchAPI.py index dc9d31242696..0d6ca83d61c7 100644 --- a/src/backend/base/langflow/components/tools/GoogleSearchAPI.py +++ b/src/backend/base/langflow/components/tools/GoogleSearchAPI.py @@ -39,5 +39,6 @@ def _build_wrapper(self): try: from langchain_google_community import GoogleSearchAPIWrapper # type: ignore except ImportError: - raise ImportError("Please install langchain-google-community to use GoogleSearchAPIWrapper.") + msg = "Please install langchain-google-community to use GoogleSearchAPIWrapper." 
+ raise ImportError(msg) return GoogleSearchAPIWrapper(google_api_key=self.google_api_key, google_cse_id=self.google_cse_id, k=self.k) diff --git a/src/backend/base/langflow/components/tools/PythonCodeStructuredTool.py b/src/backend/base/langflow/components/tools/PythonCodeStructuredTool.py index c01c062d05d6..bd2af5176b2f 100644 --- a/src/backend/base/langflow/components/tools/PythonCodeStructuredTool.py +++ b/src/backend/base/langflow/components/tools/PythonCodeStructuredTool.py @@ -175,7 +175,8 @@ def run(**kwargs): field_name = attr.split("|")[1] func_arg = self._find_arg(named_functions, func_name, field_name) if func_arg is None: - raise Exception(f"Failed to find arg: {field_name}") + msg = f"Failed to find arg: {field_name}" + raise Exception(msg) field_annotation = func_arg["annotation"] field_description = self._get_value(self._attributes[attr], str) @@ -250,7 +251,8 @@ def _parse_code(self, code: str) -> tuple[list[dict], list[dict]]: func = {"name": node.name, "args": []} for arg in node.args.args: if arg.lineno != arg.end_lineno: - raise Exception("Multiline arguments are not supported") + msg = "Multiline arguments are not supported" + raise Exception(msg) func_arg = { "name": arg.arg, diff --git a/src/backend/base/langflow/components/tools/PythonREPLTool.py b/src/backend/base/langflow/components/tools/PythonREPLTool.py index 27ef44a8ce41..268cf4b03b01 100644 --- a/src/backend/base/langflow/components/tools/PythonREPLTool.py +++ b/src/backend/base/langflow/components/tools/PythonREPLTool.py @@ -54,14 +54,16 @@ def get_globals(self, global_imports: str | list[str]) -> dict: elif isinstance(global_imports, list): modules = global_imports else: - raise ValueError("global_imports must be either a string or a list") + msg = "global_imports must be either a string or a list" + raise ValueError(msg) for module in modules: try: imported_module = importlib.import_module(module) global_dict[imported_module.__name__] = imported_module except ImportError: - raise ImportError(f"Could not import module {module}") + msg = f"Could not import module {module}" + raise ImportError(msg) return global_dict def build_tool(self) -> Tool: diff --git a/src/backend/base/langflow/components/tools/SearXNGTool.py b/src/backend/base/langflow/components/tools/SearXNGTool.py index 787071cb0f2b..789a3fa98538 100644 --- a/src/backend/base/langflow/components/tools/SearXNGTool.py +++ b/src/backend/base/langflow/components/tools/SearXNGTool.py @@ -89,7 +89,8 @@ class SearxSearch: @staticmethod def search(query: str, categories: list[str] = []) -> list: if not SearxSearch._categories and not categories: - raise ValueError("No categories provided.") + msg = "No categories provided." + raise ValueError(msg) all_categories = SearxSearch._categories + list(set(categories) - set(SearxSearch._categories)) try: url = f"{SearxSearch._url}/" diff --git a/src/backend/base/langflow/components/vectorstores/AstraDB.py b/src/backend/base/langflow/components/vectorstores/AstraDB.py index 2131861ac120..1d198bbe2980 100644 --- a/src/backend/base/langflow/components/vectorstores/AstraDB.py +++ b/src/backend/base/langflow/components/vectorstores/AstraDB.py @@ -367,10 +367,11 @@ def build_vector_store(self, vectorize_options=None): from langchain_astradb import AstraDBVectorStore from langchain_astradb.utils.astradb import SetupMode except ImportError: - raise ImportError( + msg = ( "Could not import langchain Astra DB integration package. " "Please install it with `pip install langchain-astradb`." 
) + raise ImportError(msg) try: if not self.setup_mode: @@ -378,7 +379,8 @@ def build_vector_store(self, vectorize_options=None): setup_mode_value = SetupMode[self.setup_mode.upper()] except KeyError: - raise ValueError(f"Invalid setup mode: {self.setup_mode}") + msg = f"Invalid setup mode: {self.setup_mode}" + raise ValueError(msg) if self.embedding: embedding_dict = {"embedding": self.embedding} @@ -423,7 +425,8 @@ def build_vector_store(self, vectorize_options=None): try: vector_store = AstraDBVectorStore(**vector_store_kwargs) except Exception as e: - raise ValueError(f"Error initializing AstraDBVectorStore: {str(e)}") from e + msg = f"Error initializing AstraDBVectorStore: {str(e)}" + raise ValueError(msg) from e self._add_documents_to_vector_store(vector_store) @@ -435,14 +438,16 @@ def _add_documents_to_vector_store(self, vector_store): if isinstance(_input, Data): documents.append(_input.to_lc_document()) else: - raise ValueError("Vector Store Inputs must be Data objects.") + msg = "Vector Store Inputs must be Data objects." + raise ValueError(msg) if documents: logger.debug(f"Adding {len(documents)} documents to the Vector Store.") try: vector_store.add_documents(documents) except Exception as e: - raise ValueError(f"Error adding documents to AstraDBVectorStore: {str(e)}") from e + msg = f"Error adding documents to AstraDBVectorStore: {str(e)}" + raise ValueError(msg) from e else: logger.debug("No documents to add to the Vector Store.") @@ -481,7 +486,8 @@ def search_documents(self, vector_store=None) -> list[Data]: docs = vector_store.search(query=self.search_input, search_type=search_type, **search_args) except Exception as e: - raise ValueError(f"Error performing search in AstraDBVectorStore: {str(e)}") from e + msg = f"Error performing search in AstraDBVectorStore: {str(e)}" + raise ValueError(msg) from e logger.debug(f"Retrieved documents: {len(docs)}") diff --git a/src/backend/base/langflow/components/vectorstores/Cassandra.py b/src/backend/base/langflow/components/vectorstores/Cassandra.py index 3c5471c04f55..f1519927adbe 100644 --- a/src/backend/base/langflow/components/vectorstores/Cassandra.py +++ b/src/backend/base/langflow/components/vectorstores/Cassandra.py @@ -137,9 +137,8 @@ def build_vector_store(self) -> Cassandra: import cassio from langchain_community.utilities.cassandra import SetupMode except ImportError: - raise ImportError( - "Could not import cassio integration package. " "Please install it with `pip install cassio`." - ) + msg = "Could not import cassio integration package. " "Please install it with `pip install cassio`." + raise ImportError(msg) from uuid import UUID @@ -235,10 +234,11 @@ def search_documents(self) -> list[Data]: docs = vector_store.search(query=self.search_query, search_type=search_type, **search_args) except KeyError as e: if "content" in str(e): - raise ValueError( + msg = ( "You should ingest data through Langflow (or LangChain) to query it in Langflow. " "Your collection does not contain a field name 'content'." ) + raise ValueError(msg) else: raise e @@ -262,7 +262,8 @@ def _build_search_args(self): args["filter"] = clean_filter if self.body_search: if not self.enable_body_search: - raise ValueError("You should enable body search when creating the table to search the body field.") + msg = "You should enable body search when creating the table to search the body field." 
+                raise ValueError(msg)
             args["body_search"] = self.body_search
         return args
diff --git a/src/backend/base/langflow/components/vectorstores/CassandraGraph.py b/src/backend/base/langflow/components/vectorstores/CassandraGraph.py
index d175b9367991..1aa3b99ee3cd 100644
--- a/src/backend/base/langflow/components/vectorstores/CassandraGraph.py
+++ b/src/backend/base/langflow/components/vectorstores/CassandraGraph.py
@@ -126,9 +126,8 @@ def build_vector_store(self) -> CassandraGraphVectorStore:
             import cassio
             from langchain_community.utilities.cassandra import SetupMode
         except ImportError:
-            raise ImportError(
-                "Could not import cassio integration package. " "Please install it with `pip install cassio`."
-            )
+            msg = "Could not import cassio integration package. " "Please install it with `pip install cassio`."
+            raise ImportError(msg)
 
         database_ref = self.database_ref
 
@@ -214,10 +213,11 @@ def search_documents(self) -> list[Data]:
                 docs = vector_store.search(query=self.search_query, search_type=search_type, **search_args)
             except KeyError as e:
                 if "content" in str(e):
-                    raise ValueError(
+                    msg = (
                         "You should ingest data through Langflow (or LangChain) to query it in Langflow. "
                         "Your collection does not contain a field name 'content'."
-                    ) from e
+                    )
+                    raise ValueError(msg) from e
                 else:
                     raise e
 
diff --git a/src/backend/base/langflow/components/vectorstores/Chroma.py b/src/backend/base/langflow/components/vectorstores/Chroma.py
index 4642152f9fa0..119b90c9c1d4 100644
--- a/src/backend/base/langflow/components/vectorstores/Chroma.py
+++ b/src/backend/base/langflow/components/vectorstores/Chroma.py
@@ -107,9 +107,10 @@ def build_vector_store(self) -> Chroma:
             from chromadb import Client
             from langchain_chroma import Chroma
         except ImportError:
-            raise ImportError(
+            msg = (
                 "Could not import Chroma integration package. " "Please install it with `pip install langchain-chroma`."
             )
+            raise ImportError(msg)
         # Chroma settings
         chroma_settings = None
         client = None
@@ -163,7 +164,8 @@ def _add_documents_to_vector_store(self, vector_store: "Chroma") -> None:
                 if _input not in _stored_documents_without_id:
                     documents.append(_input.to_lc_document())
             else:
-                raise ValueError("Vector Store Inputs must be Data objects.")
+                msg = "Vector Store Inputs must be Data objects."
+                raise ValueError(msg)
 
         if documents and self.embedding is not None:
             logger.debug(f"Adding {len(documents)} documents to the Vector Store.")
diff --git a/src/backend/base/langflow/components/vectorstores/Clickhouse.py b/src/backend/base/langflow/components/vectorstores/Clickhouse.py
index 35a85c7873cb..1dd08bc37e8d 100644
--- a/src/backend/base/langflow/components/vectorstores/Clickhouse.py
+++ b/src/backend/base/langflow/components/vectorstores/Clickhouse.py
@@ -72,16 +72,18 @@ def build_vector_store(self) -> Clickhouse:
         try:
             import clickhouse_connect  # type: ignore
         except ImportError as e:
-            raise ImportError(
+            msg = (
                 "Failed to import Clickhouse dependencies. "
                 "Install it using `pip install langflow[clickhouse-connect] --pre`"
-            ) from e
+            )
+            raise ImportError(msg) from e
 
         try:
             client = clickhouse_connect.get_client(host=self.host, username=self.username, password=self.password)
             client.command("SELECT 1")
         except Exception as e:
-            raise ValueError(f"Failed to connect to Clickhouse: {e}")
+            msg = f"Failed to connect to Clickhouse: {e}"
+            raise ValueError(msg)
 
         documents = []
         for _input in self.ingest_data or []:
diff --git a/src/backend/base/langflow/components/vectorstores/Couchbase.py b/src/backend/base/langflow/components/vectorstores/Couchbase.py
index e313e10b3ca2..423589605417 100644
--- a/src/backend/base/langflow/components/vectorstores/Couchbase.py
+++ b/src/backend/base/langflow/components/vectorstores/Couchbase.py
@@ -48,9 +48,8 @@ def build_vector_store(self) -> CouchbaseVectorStore:
             from couchbase.cluster import Cluster  # type: ignore
             from couchbase.options import ClusterOptions  # type: ignore
         except ImportError as e:
-            raise ImportError(
-                "Failed to import Couchbase dependencies. Install it using `pip install langflow[couchbase] --pre`"
-            ) from e
+            msg = "Failed to import Couchbase dependencies. Install it using `pip install langflow[couchbase] --pre`"
+            raise ImportError(msg) from e
 
         try:
             auth = PasswordAuthenticator(self.couchbase_username, self.couchbase_password)
@@ -59,7 +58,8 @@ def build_vector_store(self) -> CouchbaseVectorStore:
 
             cluster.wait_until_ready(timedelta(seconds=5))
         except Exception as e:
-            raise ValueError(f"Failed to connect to Couchbase: {e}")
+            msg = f"Failed to connect to Couchbase: {e}"
+            raise ValueError(msg)
 
         documents = []
         for _input in self.ingest_data or []:
diff --git a/src/backend/base/langflow/components/vectorstores/FAISS.py b/src/backend/base/langflow/components/vectorstores/FAISS.py
index e7e6310a62bc..f971508feee4 100644
--- a/src/backend/base/langflow/components/vectorstores/FAISS.py
+++ b/src/backend/base/langflow/components/vectorstores/FAISS.py
@@ -62,7 +62,8 @@ def build_vector_store(self) -> FAISS:
         Builds the FAISS object.
         """
         if not self.persist_directory:
-            raise ValueError("Folder path is required to save the FAISS index.")
+            msg = "Folder path is required to save the FAISS index."
+            raise ValueError(msg)
         path = self.resolve_path(self.persist_directory)
 
         documents = []
@@ -83,7 +84,8 @@ def search_documents(self) -> list[Data]:
         Search for documents in the FAISS vector store.
         """
         if not self.persist_directory:
-            raise ValueError("Folder path is required to load the FAISS index.")
+            msg = "Folder path is required to load the FAISS index."
+            raise ValueError(msg)
         path = self.resolve_path(self.persist_directory)
 
         vector_store = FAISS.load_local(
@@ -94,7 +96,8 @@ def search_documents(self) -> list[Data]:
         )
 
         if not vector_store:
-            raise ValueError("Failed to load the FAISS index.")
+            msg = "Failed to load the FAISS index."
+            raise ValueError(msg)
 
         logger.debug(f"Search input: {self.search_query}")
         logger.debug(f"Number of results: {self.number_of_results}")
diff --git a/src/backend/base/langflow/components/vectorstores/HCD.py b/src/backend/base/langflow/components/vectorstores/HCD.py
index cb34abdb2994..429b770e9348 100644
--- a/src/backend/base/langflow/components/vectorstores/HCD.py
+++ b/src/backend/base/langflow/components/vectorstores/HCD.py
@@ -181,18 +181,18 @@ def build_vector_store(self):
             from langchain_astradb import AstraDBVectorStore
             from langchain_astradb.utils.astradb import SetupMode
         except ImportError:
-            raise ImportError(
+            msg = (
                 "Could not import langchain Astra DB integration package. "
                 "Please install it with `pip install langchain-astradb`."
             )
+            raise ImportError(msg)
 
         try:
             from astrapy.authentication import UsernamePasswordTokenProvider
             from astrapy.constants import Environment
         except ImportError:
-            raise ImportError(
-                "Could not import astrapy integration package. " "Please install it with `pip install astrapy`."
-            )
+            msg = "Could not import astrapy integration package. " "Please install it with `pip install astrapy`."
+            raise ImportError(msg)
 
         try:
             if not self.setup_mode:
@@ -200,7 +200,8 @@ def build_vector_store(self):
 
             setup_mode_value = SetupMode[self.setup_mode.upper()]
         except KeyError:
-            raise ValueError(f"Invalid setup mode: {self.setup_mode}")
+            msg = f"Invalid setup mode: {self.setup_mode}"
+            raise ValueError(msg)
 
         if not isinstance(self.embedding, dict):
             embedding_dict = {"embedding": self.embedding}
@@ -246,7 +247,8 @@ def build_vector_store(self):
         try:
             vector_store = AstraDBVectorStore(**vector_store_kwargs)
         except Exception as e:
-            raise ValueError(f"Error initializing AstraDBVectorStore: {str(e)}") from e
+            msg = f"Error initializing AstraDBVectorStore: {str(e)}"
+            raise ValueError(msg) from e
 
         self._add_documents_to_vector_store(vector_store)
         return vector_store
@@ -257,14 +259,16 @@ def _add_documents_to_vector_store(self, vector_store):
             if isinstance(_input, Data):
                 documents.append(_input.to_lc_document())
             else:
-                raise ValueError("Vector Store Inputs must be Data objects.")
+                msg = "Vector Store Inputs must be Data objects."
+                raise ValueError(msg)
 
         if documents:
             logger.debug(f"Adding {len(documents)} documents to the Vector Store.")
             try:
                 vector_store.add_documents(documents)
             except Exception as e:
-                raise ValueError(f"Error adding documents to AstraDBVectorStore: {str(e)}") from e
+                msg = f"Error adding documents to AstraDBVectorStore: {str(e)}"
+                raise ValueError(msg) from e
         else:
             logger.debug("No documents to add to the Vector Store.")
 
@@ -302,7 +306,8 @@ def search_documents(self) -> list[Data]:
 
             docs = vector_store.search(query=self.search_input, search_type=search_type, **search_args)
         except Exception as e:
-            raise ValueError(f"Error performing search in AstraDBVectorStore: {str(e)}") from e
+            msg = f"Error performing search in AstraDBVectorStore: {str(e)}"
+            raise ValueError(msg) from e
 
         logger.debug(f"Retrieved documents: {len(docs)}")
diff --git a/src/backend/base/langflow/components/vectorstores/Milvus.py b/src/backend/base/langflow/components/vectorstores/Milvus.py
index 5391a0a315fd..11e86db57280 100644
--- a/src/backend/base/langflow/components/vectorstores/Milvus.py
+++ b/src/backend/base/langflow/components/vectorstores/Milvus.py
@@ -74,9 +74,10 @@ def build_vector_store(self):
         try:
             from langchain_milvus.vectorstores import Milvus as LangchainMilvus
         except ImportError:
-            raise ImportError(
+            msg = (
                 "Could not import Milvus integration package. " "Please install it with `pip install langchain-milvus`."
             )
+            raise ImportError(msg)
         self.connection_args.update(uri=self.uri, token=self.password)
         milvus_store = LangchainMilvus(
             embedding_function=self.embedding,
diff --git a/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py b/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py
index 5213ad89ced7..23004db1ab64 100644
--- a/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py
+++ b/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py
@@ -39,13 +39,15 @@ def build_vector_store(self) -> MongoDBAtlasVectorSearch:
         try:
             from pymongo import MongoClient
         except ImportError:
-            raise ImportError("Please install pymongo to use MongoDB Atlas Vector Store")
+            msg = "Please install pymongo to use MongoDB Atlas Vector Store"
+            raise ImportError(msg)
 
         try:
             mongo_client: MongoClient = MongoClient(self.mongodb_atlas_cluster_uri)
             collection = mongo_client[self.db_name][self.collection_name]
         except Exception as e:
-            raise ValueError(f"Failed to connect to MongoDB Atlas: {e}")
+            msg = f"Failed to connect to MongoDB Atlas: {e}"
+            raise ValueError(msg)
 
         documents = []
         for _input in self.ingest_data or []:
diff --git a/src/backend/base/langflow/components/vectorstores/Qdrant.py b/src/backend/base/langflow/components/vectorstores/Qdrant.py
index 223887d3b7f3..ea52216246a4 100644
--- a/src/backend/base/langflow/components/vectorstores/Qdrant.py
+++ b/src/backend/base/langflow/components/vectorstores/Qdrant.py
@@ -85,7 +85,8 @@ def build_vector_store(self) -> Qdrant:
                 documents.append(_input)
 
         if not isinstance(self.embedding, Embeddings):
-            raise ValueError("Invalid embedding object")
+            msg = "Invalid embedding object"
+            raise ValueError(msg)
 
         if documents:
             qdrant = Qdrant.from_documents(documents, embedding=self.embedding, **qdrant_kwargs)
diff --git a/src/backend/base/langflow/components/vectorstores/Redis.py b/src/backend/base/langflow/components/vectorstores/Redis.py
index 411b3759e362..2e346719bb4d 100644
--- a/src/backend/base/langflow/components/vectorstores/Redis.py
+++ b/src/backend/base/langflow/components/vectorstores/Redis.py
@@ -58,7 +58,8 @@ def build_vector_store(self) -> Redis:
 
         if not documents:
             if self.schema is None:
-                raise ValueError("If no documents are provided, a schema must be provided.")
+                msg = "If no documents are provided, a schema must be provided."
+                raise ValueError(msg)
             redis_vs = Redis.from_existing_index(
                 embedding=self.embedding,
                 index_name=self.redis_index_name,
diff --git a/src/backend/base/langflow/components/vectorstores/Vectara.py b/src/backend/base/langflow/components/vectorstores/Vectara.py
index a3600d8ec35d..63ea038cb3a1 100644
--- a/src/backend/base/langflow/components/vectorstores/Vectara.py
+++ b/src/backend/base/langflow/components/vectorstores/Vectara.py
@@ -59,7 +59,8 @@ def build_vector_store(self) -> "Vectara":
         try:
             from langchain_community.vectorstores import Vectara
         except ImportError:
-            raise ImportError("Could not import Vectara. Please install it with `pip install langchain-community`.")
+            msg = "Could not import Vectara. Please install it with `pip install langchain-community`."
+ raise ImportError(msg) vectara = Vectara( vectara_customer_id=self.vectara_customer_id, diff --git a/src/backend/base/langflow/components/vectorstores/Weaviate.py b/src/backend/base/langflow/components/vectorstores/Weaviate.py index af32cbd2f136..2349484d9b0c 100644 --- a/src/backend/base/langflow/components/vectorstores/Weaviate.py +++ b/src/backend/base/langflow/components/vectorstores/Weaviate.py @@ -50,7 +50,8 @@ def build_vector_store(self) -> Weaviate: client = weaviate.Client(url=self.url) if self.index_name != self.index_name.capitalize(): - raise ValueError(f"Weaviate requires the index name to be capitalized. Use: {self.index_name.capitalize()}") + msg = f"Weaviate requires the index name to be capitalized. Use: {self.index_name.capitalize()}" + raise ValueError(msg) documents = [] for _input in self.ingest_data or []: diff --git a/src/backend/base/langflow/components/vectorstores/vectara_rag.py b/src/backend/base/langflow/components/vectorstores/vectara_rag.py index 3a61a1fa31d8..47f54643ea8d 100644 --- a/src/backend/base/langflow/components/vectorstores/vectara_rag.py +++ b/src/backend/base/langflow/components/vectorstores/vectara_rag.py @@ -137,7 +137,8 @@ def generate_response( from langchain_community.vectorstores import Vectara from langchain_community.vectorstores.vectara import RerankConfig, SummaryConfig, VectaraQueryConfig except ImportError: - raise ImportError("Could not import Vectara. Please install it with `pip install langchain-community`.") + msg = "Could not import Vectara. Please install it with `pip install langchain-community`." + raise ImportError(msg) vectara = Vectara(self.vectara_customer_id, self.vectara_corpus_id, self.vectara_api_key) rerank_config = RerankConfig(self.reranker, self.reranker_k, self.diversity_bias) diff --git a/src/backend/base/langflow/custom/attributes.py b/src/backend/base/langflow/custom/attributes.py index 864f23edbc8b..f05d8c58cd0f 100644 --- a/src/backend/base/langflow/custom/attributes.py +++ b/src/backend/base/langflow/custom/attributes.py @@ -13,7 +13,8 @@ def validate_icon(value: str, *args, **kwargs): elif not value.startswith(":") or not value.endswith(":"): # emoji should have both starting and ending colons # so if one of them is missing, we will raise - raise ValueError(f"Invalid emoji. {value} is not a valid emoji.") + msg = f"Invalid emoji. {value} is not a valid emoji." + raise ValueError(msg) emoji_value = emoji.emojize(value, variant="emoji_type") if value == emoji_value: diff --git a/src/backend/base/langflow/custom/code_parser/code_parser.py b/src/backend/base/langflow/custom/code_parser/code_parser.py index 66f29d6b7239..f5f7e59b4e2d 100644 --- a/src/backend/base/langflow/custom/code_parser/code_parser.py +++ b/src/backend/base/langflow/custom/code_parser/code_parser.py @@ -66,7 +66,8 @@ def __init__(self, code: str | type) -> None: self.cache: TTLCache = TTLCache(maxsize=1024, ttl=60) if isinstance(code, type): if not inspect.isclass(code): - raise ValueError("The provided code must be a class.") + msg = "The provided code must be a class." 
+ raise ValueError(msg) # If the code is a class, get its source code code = inspect.getsource(code) self.code = code diff --git a/src/backend/base/langflow/custom/custom_component/component.py b/src/backend/base/langflow/custom/custom_component/component.py index a6242304f0e6..fa670774c984 100644 --- a/src/backend/base/langflow/custom/custom_component/component.py +++ b/src/backend/base/langflow/custom/custom_component/component.py @@ -138,11 +138,13 @@ def set_class_code(self): try: module = inspect.getmodule(self.__class__) if module is None: - raise ValueError("Could not find module for class") + msg = "Could not find module for class" + raise ValueError(msg) class_code = inspect.getsource(module) self._code = class_code except OSError: - raise ValueError(f"Could not find source code for {self.__class__.__name__}") + msg = f"Could not find source code for {self.__class__.__name__}" + raise ValueError(msg) def set(self, **kwargs): """ @@ -209,7 +211,8 @@ def get_input(self, name: str) -> Any: """ if name in self._inputs: return self._inputs[name] - raise ValueError(f"Input {name} not found in {self.__class__.__name__}") + msg = f"Input {name} not found in {self.__class__.__name__}" + raise ValueError(msg) def get_output(self, name: str) -> Any: """ @@ -226,20 +229,23 @@ def get_output(self, name: str) -> Any: """ if name in self._outputs_map: return self._outputs_map[name] - raise ValueError(f"Output {name} not found in {self.__class__.__name__}") + msg = f"Output {name} not found in {self.__class__.__name__}" + raise ValueError(msg) def set_on_output(self, name: str, **kwargs): output = self.get_output(name) for key, value in kwargs.items(): if not hasattr(output, key): - raise ValueError(f"Output {name} does not have a method {key}") + msg = f"Output {name} does not have a method {key}" + raise ValueError(msg) setattr(output, key, value) def set_output_value(self, name: str, value: Any): if name in self._outputs_map: self._outputs_map[name].value = value else: - raise ValueError(f"Output {name} not found in {self.__class__.__name__}") + msg = f"Output {name} not found in {self.__class__.__name__}" + raise ValueError(msg) def map_outputs(self, outputs: list[Output]): """ @@ -256,7 +262,8 @@ def map_outputs(self, outputs: list[Output]): """ for output in outputs: if output.name is None: - raise ValueError("Output name cannot be None.") + msg = "Output name cannot be None." + raise ValueError(msg) # Deepcopy is required to avoid modifying the original component; # allows each instance of each component to modify its own output self._outputs_map[output.name] = deepcopy(output) @@ -274,7 +281,8 @@ def map_inputs(self, inputs: list[InputTypes]): """ for input_ in inputs: if input_.name is None: - raise ValueError("Input name cannot be None.") + msg = "Input name cannot be None." 
+                raise ValueError(msg)
             self._inputs[input_.name] = deepcopy(input_)
 
     def validate(self, params: dict):
@@ -303,7 +311,8 @@ def get_output_by_method(self, method: Callable):
         output = next((output for output in self._outputs_map.values() if output.method == method.__name__), None)
         if output is None:
             method_name = method.__name__ if hasattr(method, "__name__") else str(method)
-            raise ValueError(f"Output with method {method_name} not found")
+            msg = f"Output with method {method_name} not found"
+            raise ValueError(msg)
         return output
 
     def _inherits_from_component(self, method: Callable):
@@ -340,13 +349,15 @@ def _find_matching_output_method(self, value: Component):
                     matching_pairs.append((output, input_))
         if len(matching_pairs) > 1:
             matching_pairs_str = self._build_error_string_from_matching_pairs(matching_pairs)
-            raise ValueError(
+            msg = (
                 f"There are multiple outputs from {value.__class__.__name__} "
                 f"that can connect to inputs in {self.__class__.__name__}: {matching_pairs_str}"
            )
+            raise ValueError(msg)
        output, input_ = matching_pairs[0]
        if not isinstance(output.method, str):
-            raise ValueError(f"Method {output.method} is not a valid output of {value.__class__.__name__}")
+            msg = f"Method {output.method} is not a valid output of {value.__class__.__name__}"
+            raise ValueError(msg)
        return getattr(value, output.method)
 
     def _process_connection_or_parameter(self, key, value):
@@ -361,9 +372,8 @@ def _process_connection_or_parameter(self, key, value):
             try:
                 self._method_is_valid_output(value)
             except ValueError:
-                raise ValueError(
-                    f"Method {value.__name__} is not a valid output of {value.__self__.__class__.__name__}"
-                )
+                msg = f"Method {value.__name__} is not a valid output of {value.__self__.__class__.__name__}"
+                raise ValueError(msg)
             self._connect_to_component(key, value, _input)
         else:
             self._set_parameter_or_attribute(key, value)
@@ -416,10 +426,11 @@ def _add_edge(self, component, key, output, _input):
 
     def _set_parameter_or_attribute(self, key, value):
         if isinstance(value, Component):
             methods = ", ".join([f"'{output.method}'" for output in value.outputs])
-            raise ValueError(
+            msg = (
                 f"You set {value.display_name} as value for `{key}`. "
                 f"You should pass one of the following: {methods}"
             )
+            raise ValueError(msg)
         self._set_input_value(key, value)
         self._parameters[key] = value
         self._attributes[key] = value
@@ -453,26 +464,28 @@ def __getattr__(self, name: str) -> Any:
             return self.__dict__[f"_{name}"]
         if name.startswith("_") and name[1:] in BACKWARDS_COMPATIBLE_ATTRIBUTES:
             return self.__dict__[name]
-        raise AttributeError(f"{name} not found in {self.__class__.__name__}")
+        msg = f"{name} not found in {self.__class__.__name__}"
+        raise AttributeError(msg)
 
     def _set_input_value(self, name: str, value: Any):
         if name in self._inputs:
             input_value = self._inputs[name].value
             if isinstance(input_value, Component):
                 methods = ", ".join([f"'{output.method}'" for output in input_value.outputs])
-                raise ValueError(
+                msg = (
                     f"You set {input_value.display_name} as value for `{name}`. "
                     f"You should pass one of the following: {methods}"
                 )
+                raise ValueError(msg)
             if callable(input_value):
-                raise ValueError(
-                    f"Input {name} is connected to {input_value.__self__.display_name}.{input_value.__name__}"
-                )
+                msg = f"Input {name} is connected to {input_value.__self__.display_name}.{input_value.__name__}"
+                raise ValueError(msg)
             self._inputs[name].value = value
             if hasattr(self._inputs[name], "load_from_db"):
                 self._inputs[name].load_from_db = False
         else:
-            raise ValueError(f"Input {name} not found in {self.__class__.__name__}")
+            msg = f"Input {name} not found in {self.__class__.__name__}"
+            raise ValueError(msg)
 
     def _validate_outputs(self):
         # Raise Error if some rule isn't met
@@ -489,10 +502,12 @@ def _map_parameters_on_template(self, template: dict):
             except KeyError:
                 close_match = find_closest_match(name, list(template.keys()))
                 if close_match:
-                    raise ValueError(
+                    msg = (
                         f"Parameter '{name}' not found in {self.__class__.__name__}. "
                         f"Did you mean '{close_match}'?"
                     )
-                raise ValueError(f"Parameter {name} not found in {self.__class__.__name__}. ")
+                    raise ValueError(msg)
+                msg = f"Parameter {name} not found in {self.__class__.__name__}. "
+                raise ValueError(msg)
 
     def _get_method_return_type(self, method_name: str) -> list[str]:
         method = getattr(self, method_name)
@@ -569,10 +584,11 @@ def set_attributes(self, params: dict):
         _attributes = {}
         for key, value in params.items():
             if key in self.__dict__ and value != getattr(self, key):
-                raise ValueError(
+                msg = (
                     f"{self.__class__.__name__} defines an input parameter named '{key}' "
                     f"that is a reserved word and cannot be used."
                 )
+                raise ValueError(msg)
             _attributes[key] = value
         for key, input_obj in self._inputs.items():
             if key not in _attributes:
@@ -634,7 +650,8 @@ async def _build_results(self):
                     or output.name in self._vertex.edges_source_names
                 ):
                     if output.method is None:
-                        raise ValueError(f"Output {output.name} does not have a method defined.")
+                        msg = f"Output {output.name} does not have a method defined."
+ raise ValueError(msg) self._current_output = output.name method: Callable = getattr(self, output.method) if output.cache and output.value != UNDEFINED: diff --git a/src/backend/base/langflow/custom/custom_component/custom_component.py b/src/backend/base/langflow/custom/custom_component/custom_component.py index 766e44c3dde5..b3ff427f8b0e 100644 --- a/src/backend/base/langflow/custom/custom_component/custom_component.py +++ b/src/backend/base/langflow/custom/custom_component/custom_component.py @@ -112,39 +112,48 @@ def trace_name(self): def update_state(self, name: str, value: Any): if not self._vertex: - raise ValueError("Vertex is not set") + msg = "Vertex is not set" + raise ValueError(msg) try: self._vertex.graph.update_state(name=name, record=value, caller=self._vertex.id) except Exception as e: - raise ValueError(f"Error updating state: {e}") + msg = f"Error updating state: {e}" + raise ValueError(msg) def stop(self, output_name: str | None = None): if not output_name and self._vertex and len(self._vertex.outputs) == 1: output_name = self._vertex.outputs[0]["name"] elif not output_name: - raise ValueError("You must specify an output name to call stop") + msg = "You must specify an output name to call stop" + raise ValueError(msg) if not self._vertex: - raise ValueError("Vertex is not set") + msg = "Vertex is not set" + raise ValueError(msg) try: self.graph.mark_branch(vertex_id=self._vertex.id, output_name=output_name, state="INACTIVE") except Exception as e: - raise ValueError(f"Error stopping {self.display_name}: {e}") + msg = f"Error stopping {self.display_name}: {e}" + raise ValueError(msg) def append_state(self, name: str, value: Any): if not self._vertex: - raise ValueError("Vertex is not set") + msg = "Vertex is not set" + raise ValueError(msg) try: self._vertex.graph.append_state(name=name, record=value, caller=self._vertex.id) except Exception as e: - raise ValueError(f"Error appending state: {e}") + msg = f"Error appending state: {e}" + raise ValueError(msg) def get_state(self, name: str): if not self._vertex: - raise ValueError("Vertex is not set") + msg = "Vertex is not set" + raise ValueError(msg) try: return self._vertex.graph.get_state(name=name) except Exception as e: - raise ValueError(f"Error getting state: {e}") + msg = f"Error getting state: {e}" + raise ValueError(msg) @staticmethod def resolve_path(path: str) -> str: @@ -270,14 +279,16 @@ def to_data(self, data: Any, keys: list[str] | None = None, silent_errors: bool try: data_dict[key] = model_dump[key] except KeyError: - raise ValueError(f"Key {key} not found in {item}") + msg = f"Key {key} not found in {item}" + raise ValueError(msg) elif isinstance(item, str): data_dict = {"text": item} elif isinstance(item, dict): data_dict = item.copy() else: - raise ValueError(f"Invalid data type: {type(item)}") + msg = f"Invalid data type: {type(item)}" + raise ValueError(msg) data_objects.append(Data(data=data_dict)) @@ -416,7 +427,8 @@ def variables(self): def get_variable(name: str, field: str): if hasattr(self, "_user_id") and not self.user_id: - raise ValueError(f"User id is not set for {self.__class__.__name__}") + msg = f"User id is not set for {self.__class__.__name__}" + raise ValueError(msg) variable_service = get_variable_service() # Get service instance # Retrieve and decrypt the variable by name for the current user with session_scope() as session: @@ -436,7 +448,8 @@ def list_key_names(self): List[str]: The names of the variables for the current user. 
""" if hasattr(self, "_user_id") and not self.user_id: - raise ValueError(f"User id is not set for {self.__class__.__name__}") + msg = f"User id is not set for {self.__class__.__name__}" + raise ValueError(msg) variable_service = get_variable_service() with session_scope() as session: @@ -469,7 +482,8 @@ def get_function(self): async def load_flow(self, flow_id: str, tweaks: dict | None = None) -> Graph: if not self.user_id: - raise ValueError("Session is invalid") + msg = "Session is invalid" + raise ValueError(msg) return await load_flow(user_id=str(self._user_id), flow_id=flow_id, tweaks=tweaks) async def run_flow( @@ -492,11 +506,13 @@ async def run_flow( def list_flows(self) -> list[Data]: if not self.user_id: - raise ValueError("Session is invalid") + msg = "Session is invalid" + raise ValueError(msg) try: return list_flows(user_id=str(self._user_id)) except Exception as e: - raise ValueError(f"Error listing flows: {e}") + msg = f"Error listing flows: {e}" + raise ValueError(msg) def build(self, *args: Any, **kwargs: Any) -> Any: """ diff --git a/src/backend/base/langflow/custom/directory_reader/directory_reader.py b/src/backend/base/langflow/custom/directory_reader/directory_reader.py index 803d33f229b7..55061c915cbc 100644 --- a/src/backend/base/langflow/custom/directory_reader/directory_reader.py +++ b/src/backend/base/langflow/custom/directory_reader/directory_reader.py @@ -124,7 +124,8 @@ def get_files(self): Walk through the directory path and return a list of all .py files. """ if not (safe_path := self.get_safe_path()): - raise CustomComponentPathValueError(f"The path needs to start with '{self.base_path}'.") + msg = f"The path needs to start with '{self.base_path}'." + raise CustomComponentPathValueError(msg) file_list = [] safe_path_obj = Path(safe_path) diff --git a/src/backend/base/langflow/custom/utils.py b/src/backend/base/langflow/custom/utils.py index 31f825a832e7..67133f0240ce 100644 --- a/src/backend/base/langflow/custom/utils.py +++ b/src/backend/base/langflow/custom/utils.py @@ -267,11 +267,13 @@ def run_build_inputs( def get_component_instance(custom_component: CustomComponent, user_id: str | UUID | None = None): try: if custom_component._code is None: - raise ValueError("Code is None") + msg = "Code is None" + raise ValueError(msg) elif isinstance(custom_component._code, str): custom_class = eval_custom_component_code(custom_component._code) else: - raise ValueError("Invalid code type") + msg = "Invalid code type" + raise ValueError(msg) except Exception as exc: logger.error(f"Error while evaluating custom component code: {str(exc)}") raise HTTPException( @@ -301,11 +303,13 @@ def run_build_config( try: if custom_component._code is None: - raise ValueError("Code is None") + msg = "Code is None" + raise ValueError(msg) elif isinstance(custom_component._code, str): custom_class = eval_custom_component_code(custom_component._code) else: - raise ValueError("Invalid code type") + msg = "Invalid code type" + raise ValueError(msg) except Exception as exc: logger.error(f"Error while evaluating custom component code: {str(exc)}") raise HTTPException( @@ -518,7 +522,8 @@ def update_field_dict( build_config = dd_build_config except Exception as exc: logger.error(f"Error while running update_build_config: {str(exc)}") - raise UpdateBuildConfigError(f"Error while running update_build_config: {str(exc)}") from exc + msg = f"Error while running update_build_config: {str(exc)}" + raise UpdateBuildConfigError(msg) from exc return build_config diff --git 
a/src/backend/base/langflow/events/event_manager.py b/src/backend/base/langflow/events/event_manager.py index 121d46544a3f..6ce02020bc09 100644 --- a/src/backend/base/langflow/events/event_manager.py +++ b/src/backend/base/langflow/events/event_manager.py @@ -26,19 +26,24 @@ def __init__(self, queue: asyncio.Queue): @staticmethod def _validate_callback(callback: EventCallback): if not callable(callback): - raise ValueError("Callback must be callable") + msg = "Callback must be callable" + raise ValueError(msg) # Check if it has `self, event_type and data` sig = inspect.signature(callback) if len(sig.parameters) != 3: - raise ValueError("Callback must have exactly 3 parameters") + msg = "Callback must have exactly 3 parameters" + raise ValueError(msg) if not all(param.name in ["manager", "event_type", "data"] for param in sig.parameters.values()): - raise ValueError("Callback must have exactly 3 parameters: manager, event_type, and data") + msg = "Callback must have exactly 3 parameters: manager, event_type, and data" + raise ValueError(msg) def register_event(self, name: str, event_type: str, callback: EventCallback | None = None): if not name: - raise ValueError("Event name cannot be empty") + msg = "Event name cannot be empty" + raise ValueError(msg) if not name.startswith("on_"): - raise ValueError("Event name must start with 'on_'") + msg = "Event name must start with 'on_'" + raise ValueError(msg) if callback is None: _callback = partial(self.send_event, event_type=event_type) else: diff --git a/src/backend/base/langflow/field_typing/range_spec.py b/src/backend/base/langflow/field_typing/range_spec.py index 78ed2e4353bf..5cce213aa091 100644 --- a/src/backend/base/langflow/field_typing/range_spec.py +++ b/src/backend/base/langflow/field_typing/range_spec.py @@ -13,16 +13,19 @@ class RangeSpec(BaseModel): @classmethod def max_must_be_greater_than_min(cls, v, values, **kwargs): if "min" in values.data and v <= values.data["min"]: - raise ValueError("Max must be greater than min") + msg = "Max must be greater than min" + raise ValueError(msg) return v @field_validator("step") @classmethod def step_must_be_positive(cls, v, values, **kwargs): if v <= 0: - raise ValueError("Step must be positive") + msg = "Step must be positive" + raise ValueError(msg) if values.data["step_type"] == "int" and isinstance(v, float) and not v.is_integer(): - raise ValueError("When step_type is int, step must be an integer") + msg = "When step_type is int, step must be an integer" + raise ValueError(msg) return v @classmethod diff --git a/src/backend/base/langflow/graph/edge/base.py b/src/backend/base/langflow/graph/edge/base.py index 7858a7e18a12..d09a5691f6f6 100644 --- a/src/backend/base/langflow/graph/edge/base.py +++ b/src/backend/base/langflow/graph/edge/base.py @@ -30,20 +30,23 @@ def __init__(self, source: "Vertex", target: "Vertex", edge: EdgeData): # Check if self._target_handle['fieldName'] if hasattr(target, "_custom_component"): display_name = getattr(target._custom_component, "display_name", "") - raise ValueError( + msg = ( f"Component {display_name} field '{self._target_handle['fieldName']}' " "might not be a valid input." - ) from e + ) + raise ValueError(msg) from e else: - raise ValueError( + msg = ( f"Field '{self._target_handle['fieldName']}' on {target.display_name} " "might not be a valid input." 
- ) from e + ) + raise ValueError(msg) from e else: raise e else: - raise ValueError("Target handle is not a dictionary") + msg = "Target handle is not a dictionary" + raise ValueError(msg) self.target_param = self.target_handle.field_name # validate handles self.validate_handles(source, target) @@ -59,7 +62,8 @@ def __init__(self, source: "Vertex", target: "Vertex", edge: EdgeData): self.source_handle = None self.target_handle = None else: - raise ValueError("Target handle is not a string") + msg = "Target handle is not a string" + raise ValueError(msg) # Validate in __init__ to fail fast self.validate_edge(source, target) @@ -85,7 +89,8 @@ def _validate_handles(self, source, target) -> None: if not self.valid_handles: logger.debug(self.source_handle) logger.debug(self.target_handle) - raise ValueError(f"Edge between {source.display_name} and {target.display_name} " f"has invalid handles") + msg = f"Edge between {source.display_name} and {target.display_name} " f"has invalid handles" + raise ValueError(msg) def _legacy_validate_handles(self, source, target) -> None: if self.target_handle.input_types is None: @@ -98,7 +103,8 @@ def _legacy_validate_handles(self, source, target) -> None: if not self.valid_handles: logger.debug(self.source_handle) logger.debug(self.target_handle) - raise ValueError(f"Edge between {source.vertex_type} and {target.vertex_type} " f"has invalid handles") + msg = f"Edge between {source.vertex_type} and {target.vertex_type} " f"has invalid handles" + raise ValueError(msg) def __setstate__(self, state): self.source_id = state["source_id"] @@ -154,7 +160,8 @@ def _validate_edge(self, source, target) -> None: if no_matched_type: logger.debug(self.source_types) logger.debug(self.target_reqs) - raise ValueError(f"Edge between {source.vertex_type} and {target.vertex_type} " f"has no matched type. ") + msg = f"Edge between {source.vertex_type} and {target.vertex_type} " f"has no matched type. " + raise ValueError(msg) def _legacy_validate_edge(self, source, target) -> None: # Validate that the outputs of the source node are valid inputs @@ -175,7 +182,8 @@ def _legacy_validate_edge(self, source, target) -> None: if no_matched_type: logger.debug(self.source_types) logger.debug(self.target_reqs) - raise ValueError(f"Edge between {source.vertex_type} and {target.vertex_type} " f"has no matched type") + msg = f"Edge between {source.vertex_type} and {target.vertex_type} " f"has no matched type" + raise ValueError(msg) def __repr__(self) -> str: if (hasattr(self, "source_handle") and self.source_handle) and ( @@ -221,7 +229,8 @@ async def honor(self, source: "Vertex", target: "Vertex") -> None: if not source._built: # The system should be read-only, so we should not be building vertices # that are not already built. - raise ValueError(f"Source vertex {source.id} is not built.") + msg = f"Source vertex {source.id} is not built." 
+ raise ValueError(msg) if self.matched_type == "Text": self.result = source._built_result diff --git a/src/backend/base/langflow/graph/edge/schema.py b/src/backend/base/langflow/graph/edge/schema.py index 3ad16017e469..3136a952f61c 100644 --- a/src/backend/base/langflow/graph/edge/schema.py +++ b/src/backend/base/langflow/graph/edge/schema.py @@ -65,7 +65,8 @@ def validate_name(cls, v, _info): # 'OpenAIModel-u4iGV_text_output' splits = v.split("_", 1) if len(splits) != 2: - raise ValueError(f"Invalid source handle name {v}") + msg = f"Invalid source handle name {v}" + raise ValueError(msg) v = splits[1] return v diff --git a/src/backend/base/langflow/graph/graph/base.py b/src/backend/base/langflow/graph/graph/base.py index 9e6f6dc42b81..c5828959dc6a 100644 --- a/src/backend/base/langflow/graph/graph/base.py +++ b/src/backend/base/langflow/graph/graph/base.py @@ -125,7 +125,8 @@ def __init__( self._set_start_and_end(start, end) self.prepare(start_component_id=start._id) if (start is not None and end is None) or (start is None and end is not None): - raise ValueError("You must provide both input and output components") + msg = "You must provide both input and output components" + raise ValueError(msg) @property def state_model(self): @@ -135,7 +136,8 @@ def state_model(self): def __add__(self, other): if not isinstance(other, Graph): - raise TypeError("Can only add Graph objects") + msg = "Can only add Graph objects" + raise TypeError(msg) # Add the vertices and edges from the other graph to this graph new_instance = copy.deepcopy(self) for vertex in other.vertices: @@ -147,7 +149,8 @@ def __add__(self, other): def __iadd__(self, other): if not isinstance(other, Graph): - raise TypeError("Can only add Graph objects") + msg = "Can only add Graph objects" + raise TypeError(msg) # Add the vertices and edges from the other graph to this graph for vertex in other.vertices: # This updates the edges as well @@ -211,7 +214,8 @@ def add_component(self, component: Component, component_id: str | None = None) - return component_id component._id = component_id if component_id in self.vertex_map: - raise ValueError(f"Component ID {component_id} already exists") + msg = f"Component ID {component_id} already exists" + raise ValueError(msg) frontend_node = component.to_frontend_node() self._vertices.append(frontend_node) vertex = self._create_vertex(frontend_node) @@ -229,24 +233,30 @@ def add_component(self, component: Component, component_id: str | None = None) - def _set_start_and_end(self, start: Component, end: Component): if not hasattr(start, "to_frontend_node"): - raise TypeError(f"start must be a Component. Got {type(start)}") + msg = f"start must be a Component. Got {type(start)}" + raise TypeError(msg) if not hasattr(end, "to_frontend_node"): - raise TypeError(f"end must be a Component. Got {type(end)}") + msg = f"end must be a Component. Got {type(end)}" + raise TypeError(msg) self.add_component(start, start._id) self.add_component(end, end._id) def add_component_edge(self, source_id: str, output_input_tuple: tuple[str, str], target_id: str): source_vertex = self.get_vertex(source_id) if not isinstance(source_vertex, ComponentVertex): - raise ValueError(f"Source vertex {source_id} is not a component vertex.") + msg = f"Source vertex {source_id} is not a component vertex." 
+ raise ValueError(msg) target_vertex = self.get_vertex(target_id) if not isinstance(target_vertex, ComponentVertex): - raise ValueError(f"Target vertex {target_id} is not a component vertex.") + msg = f"Target vertex {target_id} is not a component vertex." + raise ValueError(msg) output_name, input_name = output_input_tuple if source_vertex._custom_component is None: - raise ValueError(f"Source vertex {source_id} does not have a custom component.") + msg = f"Source vertex {source_id} does not have a custom component." + raise ValueError(msg) if target_vertex._custom_component is None: - raise ValueError(f"Target vertex {target_id} does not have a custom component.") + msg = f"Target vertex {target_id} does not have a custom component." + raise ValueError(msg) try: input_field = target_vertex.get_input(input_name) @@ -255,7 +265,8 @@ def add_component_edge(self, source_id: str, output_input_tuple: tuple[str, str] except ValueError: input_field = target_vertex.data.get("node", {}).get("template", {}).get(input_name) if not input_field: - raise ValueError(f"Input field {input_name} not found in target vertex {target_id}") + msg = f"Input field {input_name} not found in target vertex {target_id}" + raise ValueError(msg) input_types = input_field.get("input_types", []) input_field_type = input_field.get("type", "") @@ -287,7 +298,8 @@ async def async_start( event_manager: EventManager | None = None, ): if not self._prepared: - raise ValueError("Graph not prepared. Call prepare() first.") + msg = "Graph not prepared. Call prepare() first." + raise ValueError(msg) # The idea is for this to return a generator that yields the result of # each step call and raise StopIteration when the graph is done for _input in inputs or []: @@ -306,7 +318,8 @@ async def async_start( if isinstance(result, Finish): return - raise ValueError("Max iterations reached") + msg = "Max iterations reached" + raise ValueError(msg) def _snapshot(self): return { @@ -488,15 +501,17 @@ def validate_stream(self): successors = self.get_all_successors(vertex) for successor in successors: if successor.params.get("stream") or successor.params.get("streaming"): - raise ValueError( + msg = ( f"Components {vertex.display_name} and {successor.display_name} " "are connected and both have stream or streaming set to True" ) + raise ValueError(msg) @property def first_layer(self): if self._first_layer is None: - raise ValueError("Graph not prepared. Call prepare() first.") + msg = "Graph not prepared. Call prepare() first." + raise ValueError(msg) return self._first_layer @property @@ -528,7 +543,8 @@ def run_id(self): ValueError: If the run ID is not set. """ if not self._run_id: - raise ValueError("Run ID not set") + msg = "Run ID not set" + raise ValueError(msg) return self._run_id def set_run_id(self, run_id: uuid.UUID | None = None): @@ -603,7 +619,8 @@ def _set_inputs(self, input_components: list[str], inputs: dict[str, str], input elif input_type is not None and input_type != "any" and input_type not in vertex.id.lower(): continue if vertex is None: - raise ValueError(f"Vertex {vertex_id} not found") + msg = f"Vertex {vertex_id} not found" + raise ValueError(msg) vertex.update_raw_params(inputs, overwrite=True) async def _run( @@ -631,19 +648,22 @@ async def _run( """ if input_components and not isinstance(input_components, list): - raise ValueError(f"Invalid components value: {input_components}. Expected list") + msg = f"Invalid components value: {input_components}. 
Expected list" + raise ValueError(msg) elif input_components is None: input_components = [] if not isinstance(inputs.get(INPUT_FIELD_NAME, ""), str): - raise ValueError(f"Invalid input value: {inputs.get(INPUT_FIELD_NAME)}. Expected string") + msg = f"Invalid input value: {inputs.get(INPUT_FIELD_NAME)}. Expected string" + raise ValueError(msg) if inputs: self._set_inputs(input_components, inputs, input_type) # Update all the vertices with the session_id for vertex_id in self._has_session_id_vertices: vertex = self.get_vertex(vertex_id) if vertex is None: - raise ValueError(f"Vertex {vertex_id} not found") + msg = f"Vertex {vertex_id} not found" + raise ValueError(msg) vertex.update_raw_params({"session_id": session_id}) # Process the graph try: @@ -660,7 +680,8 @@ async def _run( self.increment_run_count() except Exception as exc: asyncio.create_task(self.end_all_traces(error=exc)) - raise ValueError(f"Error running graph: {exc}") from exc + msg = f"Error running graph: {exc}" + raise ValueError(msg) from exc finally: asyncio.create_task(self.end_all_traces()) # Get the outputs @@ -669,7 +690,8 @@ async def _run( if not vertex._built: continue if vertex is None: - raise ValueError(f"Vertex {vertex_id} not found") + msg = f"Vertex {vertex_id} not found" + raise ValueError(msg) if not vertex.result and not stream and hasattr(vertex, "consume_async_generator"): await vertex.consume_async_generator() @@ -719,7 +741,8 @@ def run( # Attempt to get the running event loop; if none, an exception is raised loop = asyncio.get_running_loop() if loop.is_closed(): - raise RuntimeError("The running event loop is closed.") + msg = "The running event loop is closed." + raise RuntimeError(msg) except RuntimeError: # If there's no running event loop or it's closed, use asyncio.run return asyncio.run(coro) @@ -1001,11 +1024,11 @@ def from_payload( logger.exception(exc) if "nodes" not in payload and "edges" not in payload: logger.exception(exc) - raise ValueError( - f"Invalid payload. Expected keys 'nodes' and 'edges'. Found {list(payload.keys())}" - ) from exc + msg = f"Invalid payload. Expected keys 'nodes' and 'edges'. Found {list(payload.keys())}" + raise ValueError(msg) from exc - raise ValueError(f"Error while creating graph from payload: {exc}") from exc + msg = f"Error while creating graph from payload: {exc}" + raise ValueError(msg) from exc def __eq__(self, other: object) -> bool: if not isinstance(other, Graph): @@ -1181,7 +1204,8 @@ def get_vertex(self, vertex_id: str, silent: bool = False) -> Vertex: try: return self.vertex_map[vertex_id] except KeyError: - raise ValueError(f"Vertex {vertex_id} not found") + msg = f"Vertex {vertex_id} not found" + raise ValueError(msg) def get_root_of_group_node(self, vertex_id: str) -> Vertex: """Returns the root of a group node.""" @@ -1195,7 +1219,8 @@ def get_root_of_group_node(self, vertex_id: str) -> Vertex: successors = self.get_all_successors(vertex, recursive=False) if not any(successor in vertices for successor in successors): return vertex - raise ValueError(f"Vertex {vertex_id} is not a top level vertex or no root vertex found") + msg = f"Vertex {vertex_id} is not a top level vertex or no root vertex found" + raise ValueError(msg) def get_next_in_queue(self): if not self._run_queue: @@ -1213,7 +1238,8 @@ async def astep( event_manager: EventManager | None = None, ): if not self._prepared: - raise ValueError("Graph not prepared. Call prepare() first.") + msg = "Graph not prepared. Call prepare() first." 
+ raise ValueError(msg) if not self._run_queue: asyncio.create_task(self.end_all_traces()) return Finish() @@ -1357,7 +1383,8 @@ async def build_vertex( result_dict = vertex.result artifacts = vertex.artifacts else: - raise ValueError(f"No result found for vertex {vertex_id}") + msg = f"No result found for vertex {vertex_id}" + raise ValueError(msg) vertex_build_result = VertexBuildResult( result_dict=result_dict, params=params, valid=valid, artifacts=artifacts, vertex=vertex @@ -1486,7 +1513,8 @@ async def _execute_tasks(self, tasks: list[asyncio.Task], lock: asyncio.Lock) -> elif isinstance(result, tuple) and len(result) == 5: vertices.append(result[4]) else: - raise ValueError(f"Invalid result from task {task_name}: {result}") + msg = f"Invalid result from task {task_name}: {result}" + raise ValueError(msg) for v in vertices: # set all executed vertices as non-runnable to not run them again. @@ -1519,7 +1547,8 @@ def topological_sort(self) -> list[Vertex]: def dfs(vertex): if state[vertex] == 1: # We have a cycle - raise ValueError("Graph contains a cycle, cannot perform topological sort") + msg = "Graph contains a cycle, cannot perform topological sort" + raise ValueError(msg) if state[vertex] == 0: state[vertex] = 1 for edge in vertex.edges: @@ -1631,9 +1660,11 @@ def build_edge(self, edge: EdgeData) -> CycleEdge | Edge: target = self.get_vertex(edge["target"]) if source is None: - raise ValueError(f"Source vertex {edge['source']} not found") + msg = f"Source vertex {edge['source']} not found" + raise ValueError(msg) if target is None: - raise ValueError(f"Target vertex {edge['target']} not found") + msg = f"Target vertex {edge['target']} not found" + raise ValueError(msg) if (source.id, target.id) in self.cycles: new_edge: CycleEdge | Edge = CycleEdge(source, target, edge) else: @@ -1676,7 +1707,8 @@ def _create_vertex(self, frontend_data: NodeData): vertex_type: str = vertex_data["type"] # type: ignore vertex_base_type: str = vertex_data["node"]["template"]["_type"] # type: ignore if "id" not in vertex_data: - raise ValueError(f"Vertex data for {vertex_data['display_name']} does not contain an id") + msg = f"Vertex data for {vertex_data['display_name']} does not contain an id" + raise ValueError(msg) VertexClass = self._get_vertex_class(vertex_type, vertex_base_type, vertex_data["id"]) @@ -1687,7 +1719,8 @@ def _create_vertex(self, frontend_data: NodeData): def prepare(self, stop_component_id: str | None = None, start_component_id: str | None = None): self.initialize() if stop_component_id and start_component_id: - raise ValueError("You can only provide one of stop_component_id or start_component_id") + msg = "You can only provide one of stop_component_id or start_component_id" + raise ValueError(msg) self.validate_stream() if stop_component_id or start_component_id: @@ -1748,7 +1781,8 @@ def layered_topological_sort( # Find the chat input component chat_input = find_start_component_id(vertices_ids) if chat_input is None: - raise ValueError("No input component found and no start component provided") + msg = "No input component found and no start component provided" + raise ValueError(msg) queue = deque([chat_input]) else: queue = deque( diff --git a/src/backend/base/langflow/graph/graph/state_model.py b/src/backend/base/langflow/graph/graph/state_model.py index e747f079a79f..aaf8ed47e5f8 100644 --- a/src/backend/base/langflow/graph/graph/state_model.py +++ b/src/backend/base/langflow/graph/graph/state_model.py @@ -53,7 +53,8 @@ def create_state_model_from_graph(graph: BaseModel) -> 
type[BaseModel]: """ for vertex in graph.vertices: if hasattr(vertex, "_custom_component") and vertex._custom_component is None: - raise ValueError(f"Vertex {vertex.id} does not have a component instance.") + msg = f"Vertex {vertex.id} does not have a component instance." + raise ValueError(msg) state_model_getters = [ vertex._custom_component.get_state_model_instance_getter() diff --git a/src/backend/base/langflow/graph/graph/utils.py b/src/backend/base/langflow/graph/graph/utils.py index 4f2dc346b273..324ded266672 100644 --- a/src/backend/base/langflow/graph/graph/utils.py +++ b/src/backend/base/langflow/graph/graph/utils.py @@ -173,7 +173,8 @@ def set_new_target_handle(proxy_id, new_edge, target_handle, node): new_edge["target"] = proxy_id _type = target_handle.get("type") if _type is None: - raise KeyError("The 'type' key must be present in target_handle.") + msg = "The 'type' key must be present in target_handle." + raise KeyError(msg) field = target_handle["proxy"]["field"] new_target_handle = { @@ -267,7 +268,8 @@ def get_root_of_group_node( if not any(successor in child_vertices for successor in successors): return child_id - raise ValueError(f"Vertex {vertex_id} is not a top level vertex or no root vertex found") + msg = f"Vertex {vertex_id} is not a top level vertex or no root vertex found" + raise ValueError(msg) def sort_up_to_vertex( @@ -281,10 +283,12 @@ def sort_up_to_vertex( stop_or_start_vertex = graph[vertex_id] except KeyError: if parent_node_map is None: - raise ValueError("Parent node map is required to find the root of a group node") + msg = "Parent node map is required to find the root of a group node" + raise ValueError(msg) vertex_id = get_root_of_group_node(graph=graph, vertex_id=vertex_id, parent_node_map=parent_node_map) if vertex_id not in graph: - raise ValueError(f"Vertex {vertex_id} not found into graph") + msg = f"Vertex {vertex_id} not found into graph" + raise ValueError(msg) stop_or_start_vertex = graph[vertex_id] visited, excluded = set(), set() diff --git a/src/backend/base/langflow/graph/state/model.py b/src/backend/base/langflow/graph/state/model.py index 4d4cc6f23fcf..7bf2c94b3488 100644 --- a/src/backend/base/langflow/graph/state/model.py +++ b/src/backend/base/langflow/graph/state/model.py @@ -30,9 +30,11 @@ class does not have a get_output_by_method attribute. >>> __validate_method(lambda x: x) # This will raise a ValueError """ if not hasattr(method, "__self__"): - raise ValueError(f"Method {method} does not have a __self__ attribute.") + msg = f"Method {method} does not have a __self__ attribute." + raise ValueError(msg) if not hasattr(method.__self__, "get_output_by_method"): - raise ValueError(f"Method's class {method.__self__} must have a get_output_by_method attribute.") + msg = f"Method's class {method.__self__} must have a get_output_by_method attribute." + raise ValueError(msg) def build_output_getter(method: Callable, validate: bool = True) -> Callable: @@ -82,7 +84,8 @@ def output_getter(_): return_type = get_type_hints(method).get("return", None) if return_type is None: - raise ValueError(f"Method {method.__name__} has no return type annotation.") + msg = f"Method {method.__name__} has no return type annotation." 
+ raise ValueError(msg) output_getter.__annotations__["return"] = return_type return output_getter @@ -226,10 +229,12 @@ def create_state_model(model_name: str = "State", validate: bool = True, **kwarg # (, Field(...)) # typing.Annotated[, Field(...)] if not isinstance(value[0], type): - raise ValueError(f"Invalid type for field {name}: {type(value[0])}") + msg = f"Invalid type for field {name}: {type(value[0])}" + raise ValueError(msg) fields[name] = (value[0], value[1]) else: - raise ValueError(f"Invalid value type {type(value)} for field {name}") + msg = f"Invalid value type {type(value)} for field {name}" + raise ValueError(msg) # Create the model dynamically config_dict = ConfigDict(arbitrary_types_allowed=True, validate_assignment=True) diff --git a/src/backend/base/langflow/graph/vertex/base.py b/src/backend/base/langflow/graph/vertex/base.py index 96fcd08f0456..419240ab1656 100644 --- a/src/backend/base/langflow/graph/vertex/base.py +++ b/src/backend/base/langflow/graph/vertex/base.py @@ -101,7 +101,8 @@ def __init__( def set_input_value(self, name: str, value: Any): if self._custom_component is None: - raise ValueError(f"Vertex {self.id} does not have a component instance.") + msg = f"Vertex {self.id} does not have a component instance." + raise ValueError(msg) self._custom_component._set_input_value(name, value) def to_data(self): @@ -211,7 +212,8 @@ def _parse_data(self) -> None: self.data = self._data["data"] if self.data["node"]["template"]["_type"] == "Component": if "outputs" not in self.data["node"]: - raise ValueError(f"Outputs not found for {self.display_name}") + msg = f"Outputs not found for {self.display_name}" + raise ValueError(msg) self.outputs = self.data["node"]["outputs"] else: self.outputs = self.data["node"].get("outputs", []) @@ -255,7 +257,8 @@ def _parse_data(self) -> None: def get_value_from_template_dict(self, key: str): template_dict = self.data.get("node", {}).get("template", {}) if key not in template_dict: - raise ValueError(f"Key {key} not found in template dict") + msg = f"Key {key} not found in template dict" + raise ValueError(msg) return template_dict.get(key, {}).get("value") def get_task(self): @@ -308,7 +311,8 @@ def _build_params(self): # and use that as the value for the param if self.graph is None: - raise ValueError("Graph not found") + msg = "Graph not found" + raise ValueError(msg) if self.updated_raw_params: self.updated_raw_params = False @@ -406,7 +410,8 @@ def _build_params(self): if isinstance(val, list) and all(isinstance(item, dict) for item in val): params[field_name] = pd.DataFrame(val) else: - raise ValueError(f"Invalid value type {type(val)} for field {field_name}") + msg = f"Invalid value type {type(val)} for field {field_name}" + raise ValueError(msg) elif val is not None and val != "": params[field_name] = val @@ -461,7 +466,8 @@ async def _build( await self._build_each_vertex_in_params_dict(user_id) if self.base_type is None: - raise ValueError(f"Base type for vertex {self.display_name} not found") + msg = f"Base type for vertex {self.display_name} not found" + raise ValueError(msg) if not self._custom_component: custom_component, custom_params = await initialize.loading.instantiate_class( @@ -618,7 +624,8 @@ async def _get_result(self, requester: "Vertex", target_handle_name: str | None if not self._built: if flow_id: asyncio.create_task(log_transaction(str(flow_id), source=self, target=requester, status="error")) - raise ValueError(f"Component {self.display_name} has not been built yet") + msg = f"Component 
{self.display_name} has not been built yet" + raise ValueError(msg) result = self._built_result if self.use_result else self._built_object if flow_id: @@ -662,10 +669,11 @@ async def _build_list_of_vertices_and_update_params( self.params[key].append(result) except AttributeError as e: logger.exception(e) - raise ValueError( + msg = ( f"Params {key} ({self.params[key]}) is not a list and cannot be extended with {result}" f"Error building Component {self.display_name}: \n\n{str(e)}" - ) from e + ) + raise ValueError(msg) from e def _handle_func(self, key, result): """ @@ -705,7 +713,8 @@ async def _build_results(self, custom_component, custom_params, fallback_to_env_ except Exception as exc: tb = traceback.format_exc() logger.exception(exc) - raise ComponentBuildException(f"Error building Component {self.display_name}: \n\n{exc}", tb) from exc + msg = f"Error building Component {self.display_name}: \n\n{exc}" + raise ComponentBuildException(msg, tb) from exc def _update_built_object_and_artifacts(self, result: Any | tuple[Any, dict] | tuple["Component", Any, dict]): """ @@ -730,7 +739,8 @@ def _validate_built_object(self): Checks if the built object is None and raises a ValueError if so. """ if isinstance(self._built_object, UnbuiltObject): - raise ValueError(f"{self.display_name}: {self._built_object_repr()}") + msg = f"{self.display_name}: {self._built_object_repr()}" + raise ValueError(msg) elif self._built_object is None: message = f"{self.display_name} returned None." if self.base_type == "custom_components": @@ -739,7 +749,8 @@ def _validate_built_object(self): logger.warning(message) elif isinstance(self._built_object, Iterator | AsyncIterator): if self.display_name in ["Text Output"]: - raise ValueError(f"You are trying to stream to a {self.display_name}. Try using a Chat Output instead.") + msg = f"You are trying to stream to a {self.display_name}. Try using a Chat Output instead." + raise ValueError(msg) def _reset(self, params_update: dict[str, Any] | None = None): self._built = False diff --git a/src/backend/base/langflow/graph/vertex/types.py b/src/backend/base/langflow/graph/vertex/types.py index 466e049eaa13..2ea578a1dd19 100644 --- a/src/backend/base/langflow/graph/vertex/types.py +++ b/src/backend/base/langflow/graph/vertex/types.py @@ -40,7 +40,8 @@ def __init__(self, data: NodeData, graph): def get_input(self, name: str) -> InputTypes: if self._custom_component is None: - raise ValueError(f"Vertex {self.id} does not have a component instance.") + msg = f"Vertex {self.id} does not have a component instance." 
+ raise ValueError(msg) return self._custom_component.get_input(name) def get_output(self, name: str) -> Output: @@ -105,10 +106,12 @@ async def _get_result(self, requester: "Vertex", target_handle_name: str | None if edge.is_cycle and edge.target_param: return requester.get_value_from_template_dict(edge.target_param) - raise ValueError(f"Component {self.display_name} has not been built yet") + msg = f"Component {self.display_name} has not been built yet" + raise ValueError(msg) if requester is None: - raise ValueError("Requester Vertex is None") + msg = "Requester Vertex is None" + raise ValueError(msg) edges = self.get_edge_with_target(requester.id) result = UNDEFINED @@ -131,11 +134,14 @@ async def _get_result(self, requester: "Vertex", target_handle_name: str | None break if result is UNDEFINED: if edge is None: - raise ValueError(f"Edge not found between {self.display_name} and {requester.display_name}") + msg = f"Edge not found between {self.display_name} and {requester.display_name}" + raise ValueError(msg) elif edge.source_handle.name not in self.results: - raise ValueError(f"Result not found for {edge.source_handle.name}. Results: {self.results}") + msg = f"Result not found for {edge.source_handle.name}. Results: {self.results}" + raise ValueError(msg) else: - raise ValueError(f"Result not found for {edge.source_handle.name} in {edge}") + msg = f"Result not found for {edge.source_handle.name} in {edge}" + raise ValueError(msg) if flow_id: asyncio.create_task(log_transaction(source=self, target=requester, flow_id=str(flow_id), status="success")) return result @@ -347,7 +353,8 @@ def _process_data_component(self): elif ignore_errors: logger.error(f"Data expected, but got {value} of type {type(value)}") else: - raise ValueError(f"Data expected, but got {value} of type {type(value)}") + msg = f"Data expected, but got {value} of type {type(value)}" + raise ValueError(msg) self.artifacts = DataOutputResponse(data=artifacts) return self._built_object @@ -370,7 +377,8 @@ async def _run(self, *args, **kwargs): async def stream(self): iterator = self.params.get(INPUT_FIELD_NAME, None) if not isinstance(iterator, AsyncIterator | Iterator): - raise ValueError("The message must be an iterator or an async iterator.") + msg = "The message must be an iterator or an async iterator." + raise ValueError(msg) is_async = isinstance(iterator, AsyncIterator) complete_message = "" if is_async: diff --git a/src/backend/base/langflow/helpers/data.py b/src/backend/base/langflow/helpers/data.py index 4361789df605..36eee93754dc 100644 --- a/src/backend/base/langflow/helpers/data.py +++ b/src/backend/base/langflow/helpers/data.py @@ -58,7 +58,8 @@ def messages_to_text(template: str, messages: Message | list[Message]) -> str: for message in messages: # If it is not a message, create one with the key "text" if not isinstance(message, Message): - raise ValueError("All elements in the list must be of type Message.") + msg = "All elements in the list must be of type Message." 
+ raise ValueError(msg) _messages.append(message) formated_messages = [template.format(data=message.model_dump(), **message.model_dump()) for message in _messages] diff --git a/src/backend/base/langflow/helpers/flow.py b/src/backend/base/langflow/helpers/flow.py index 97e68cf9c113..6eeb300a9b1a 100644 --- a/src/backend/base/langflow/helpers/flow.py +++ b/src/backend/base/langflow/helpers/flow.py @@ -26,7 +26,8 @@ def list_flows(*, user_id: str | None = None) -> list[Data]: if not user_id: - raise ValueError("Session is invalid") + msg = "Session is invalid" + raise ValueError(msg) try: with session_scope() as session: flows = session.exec( @@ -36,7 +37,8 @@ def list_flows(*, user_id: str | None = None) -> list[Data]: flows_data = [flow.to_data() for flow in flows] return flows_data except Exception as e: - raise ValueError(f"Error listing flows: {e}") + msg = f"Error listing flows: {e}" + raise ValueError(msg) async def load_flow( @@ -46,16 +48,19 @@ async def load_flow( from langflow.processing.process import process_tweaks if not flow_id and not flow_name: - raise ValueError("Flow ID or Flow Name is required") + msg = "Flow ID or Flow Name is required" + raise ValueError(msg) if not flow_id and flow_name: flow_id = find_flow(flow_name, user_id) if not flow_id: - raise ValueError(f"Flow {flow_name} not found") + msg = f"Flow {flow_name} not found" + raise ValueError(msg) with session_scope() as session: graph_data = flow.data if (flow := session.get(Flow, flow_id)) else None if not graph_data: - raise ValueError(f"Flow {flow_id} not found") + msg = f"Flow {flow_id} not found" + raise ValueError(msg) if tweaks: graph_data = process_tweaks(graph_data=graph_data, tweaks=tweaks) graph = Graph.from_payload(graph_data, flow_id=flow_id, user_id=user_id) @@ -78,7 +83,8 @@ async def run_flow( run_id: str | None = None, ) -> list[RunOutputs]: if user_id is None: - raise ValueError("Session is invalid") + msg = "Session is invalid" + raise ValueError(msg) graph = await load_flow(user_id, flow_id, flow_name, tweaks) if run_id: graph.set_run_id(UUID(run_id)) diff --git a/src/backend/base/langflow/initial_setup/setup.py b/src/backend/base/langflow/initial_setup/setup.py index 9a296e3f3725..3b28aeee17f1 100644 --- a/src/backend/base/langflow/initial_setup/setup.py +++ b/src/backend/base/langflow/initial_setup/setup.py @@ -352,7 +352,8 @@ def load_starter_projects(retries=3, delay=1) -> list[tuple[Path, dict]]: except orjson.JSONDecodeError as e: attempt += 1 if attempt >= retries: - raise ValueError(f"Error loading starter project {file}: {e}") + msg = f"Error loading starter project {file}: {e}" + raise ValueError(msg) time.sleep(delay) # Wait before retrying return starter_projects @@ -363,7 +364,8 @@ def copy_profile_pictures(): target = Path(config_dir) / "profile_pictures" if not os.path.exists(origin): - raise ValueError(f"The source folder '{origin}' does not exist.") + msg = f"The source folder '{origin}' does not exist." + raise ValueError(msg) if not os.path.exists(target): os.makedirs(target) @@ -605,7 +607,8 @@ def initialize_super_user_if_needed(): username = settings_service.auth_settings.SUPERUSER password = settings_service.auth_settings.SUPERUSER_PASSWORD if not username or not password: - raise ValueError("SUPERUSER and SUPERUSER_PASSWORD must be set in the settings if AUTO_LOGIN is true.") + msg = "SUPERUSER and SUPERUSER_PASSWORD must be set in the settings if AUTO_LOGIN is true." 
+ raise ValueError(msg) with session_scope() as session: super_user = create_super_user(db=session, username=username, password=password) diff --git a/src/backend/base/langflow/inputs/input_mixin.py b/src/backend/base/langflow/inputs/input_mixin.py index 0e095d40f2a7..2410479c578c 100644 --- a/src/backend/base/langflow/inputs/input_mixin.py +++ b/src/backend/base/langflow/inputs/input_mixin.py @@ -132,13 +132,16 @@ class FileMixin(BaseModel): @classmethod def validate_file_types(cls, v): if not isinstance(v, list): - raise ValueError("file_types must be a list") + msg = "file_types must be a list" + raise ValueError(msg) # types should be a list of extensions without the dot for file_type in v: if not isinstance(file_type, str): - raise ValueError("file_types must be a list of strings") + msg = "file_types must be a list of strings" + raise ValueError(msg) if file_type.startswith("."): - raise ValueError("file_types should not start with a dot") + msg = "file_types should not start with a dot" + raise ValueError(msg) return v @@ -174,4 +177,5 @@ def validate_table_schema(cls, v): return TableSchema(columns=v) if isinstance(v, TableSchema): return v - raise ValueError("table_schema must be a TableSchema or a list of Columns") + msg = "table_schema must be a TableSchema or a list of Columns" + raise ValueError(msg) diff --git a/src/backend/base/langflow/inputs/inputs.py b/src/backend/base/langflow/inputs/inputs.py index 51bb741a9a20..a0dd522c9ce6 100644 --- a/src/backend/base/langflow/inputs/inputs.py +++ b/src/backend/base/langflow/inputs/inputs.py @@ -35,14 +35,16 @@ class TableInput(BaseInputMixin, MetadataTraceMixin, TableMixin, ListableInputMi def validate_value(cls, v: Any, _info): # Check if value is a list of dicts if not isinstance(v, list): - raise ValueError(f"TableInput value must be a list of dictionaries or Data. Value '{v}' is not a list.") + msg = f"TableInput value must be a list of dictionaries or Data. Value '{v}' is not a list." + raise ValueError(msg) for item in v: if not isinstance(item, dict | Data): - raise ValueError( + msg = ( "TableInput value must be a list of dictionaries or Data. " f"Item '{item}' is not a dictionary or Data." ) + raise ValueError(msg) return v @@ -149,7 +151,8 @@ def _validate_value(v: Any, _info): return v if isinstance(v, str): return Message(text=v) - raise ValueError(f"Invalid value type {type(v)}") + msg = f"Invalid value type {type(v)}" + raise ValueError(msg) class MessageTextInput(StrInput, MetadataTraceMixin, InputTraceMixin): @@ -194,15 +197,17 @@ def _validate_value(v: Any, _info): else: keys = ", ".join(v.data.keys()) input_name = _info.data["name"] - raise ValueError( + msg = ( f"The input to '{input_name}' must contain the key '{v.text_key}'." f"You can set `text_key` to one of the following keys: {keys} " "or set the value using another Component." ) + raise ValueError(msg) elif isinstance(v, AsyncIterator | Iterator): value = v else: - raise ValueError(f"Invalid value type {type(v)}") + msg = f"Invalid value type {type(v)}" + raise ValueError(msg) return value @@ -277,17 +282,19 @@ def validate_value(cls, v: Any, _info): else: keys = ", ".join(v.data.keys()) input_name = _info.data["name"] - raise ValueError( + msg = ( f"The input to '{input_name}' must contain the key '{v.text_key}'." f"You can set `text_key` to one of the following keys: {keys} " "or set the value using another Component." 
) + raise ValueError(msg) elif isinstance(v, AsyncIterator | Iterator): value = v elif v is None: value = None else: - raise ValueError(f"Invalid value type `{type(v)}` for input `{_info.data['name']}`") + msg = f"Invalid value type `{type(v)}` for input `{_info.data['name']}`" + raise ValueError(msg) return value @@ -322,7 +329,8 @@ def validate_value(cls, v: Any, _info): """ if v and not isinstance(v, int | float): - raise ValueError(f"Invalid value type {type(v)} for input {_info.data.get('name')}.") + msg = f"Invalid value type {type(v)} for input {_info.data.get('name')}." + raise ValueError(msg) if isinstance(v, float): v = int(v) return v @@ -358,7 +366,8 @@ def validate_value(cls, v: Any, _info): ValueError: If the value is not of a valid type or if the input is missing a required key. """ if v and not isinstance(v, int | float): - raise ValueError(f"Invalid value type {type(v)} for input {_info.data.get('name')}.") + msg = f"Invalid value type {type(v)} for input {_info.data.get('name')}." + raise ValueError(msg) if isinstance(v, int): v = float(v) return v @@ -453,10 +462,12 @@ class MultiselectInput(BaseInputMixin, ListableInputMixin, DropDownMixin, Metada def validate_value(cls, v: Any, _info): # Check if value is a list of dicts if not isinstance(v, list): - raise ValueError(f"MultiselectInput value must be a list. Value: '{v}'") + msg = f"MultiselectInput value must be a list. Value: '{v}'" + raise ValueError(msg) for item in v: if not isinstance(item, str): - raise ValueError(f"MultiselectInput value must be a list of strings. Item: '{item}' is not a string") + msg = f"MultiselectInput value must be a list of strings. Item: '{item}' is not a string" + raise ValueError(msg) return v @@ -527,4 +538,5 @@ def instantiate_input(input_type: str, data: dict) -> InputTypes: if input_type_class: return input_type_class(**data) else: - raise ValueError(f"Invalid input type: {input_type}") + msg = f"Invalid input type: {input_type}" + raise ValueError(msg) diff --git a/src/backend/base/langflow/inputs/validators.py b/src/backend/base/langflow/inputs/validators.py index 7056265f89a3..ac9dc12bb7d8 100644 --- a/src/backend/base/langflow/inputs/validators.py +++ b/src/backend/base/langflow/inputs/validators.py @@ -13,7 +13,8 @@ def validate_boolean(value: bool) -> bool: if isinstance(value, bool): return value else: - raise ValueError("Value must be a boolean") + msg = "Value must be a boolean" + raise ValueError(msg) CoalesceBool = Annotated[bool, PlainValidator(validate_boolean)] diff --git a/src/backend/base/langflow/interface/initialize/loading.py b/src/backend/base/langflow/interface/initialize/loading.py index e5beee40567d..97d311cd1b88 100644 --- a/src/backend/base/langflow/interface/initialize/loading.py +++ b/src/backend/base/langflow/interface/initialize/loading.py @@ -32,7 +32,8 @@ async def instantiate_class( logger.debug(f"Instantiating {vertex_type} of type {base_type}") if not base_type: - raise ValueError("No base type provided for vertex") + msg = "No base type provided for vertex" + raise ValueError(msg) custom_params = get_params(vertex.params) code = custom_params.pop("code") @@ -65,7 +66,8 @@ async def get_instance_results( elif base_type == "component": return await build_component(params=custom_params, custom_component=custom_component) else: - raise ValueError(f"Base type {base_type} not found.") + msg = f"Base type {base_type} not found." 
+ raise ValueError(msg) def get_params(vertex_params): @@ -125,7 +127,8 @@ def update_params_with_load_from_db_fields( if fallback_to_env_vars and key is None: var = os.getenv(params[field]) if var is None: - raise ValueError(f"Environment variable {params[field]} is not set.") + msg = f"Environment variable {params[field]} is not set." + raise ValueError(msg) key = var logger.info(f"Using environment variable {params[field]} for {field}") if key is None: @@ -197,4 +200,5 @@ async def build_custom_component(params: dict, custom_component: CustomComponent custom_component._results = {custom_component._vertex.outputs[0].get("name"): build_result} return custom_component, build_result, artifact - raise ValueError("Custom component does not have a vertex") + msg = "Custom component does not have a vertex" + raise ValueError(msg) diff --git a/src/backend/base/langflow/interface/utils.py b/src/backend/base/langflow/interface/utils.py index e7f72cabc256..10d595130198 100644 --- a/src/backend/base/langflow/interface/utils.py +++ b/src/backend/base/langflow/interface/utils.py @@ -15,7 +15,8 @@ def load_file_into_dict(file_path: str) -> dict: if not os.path.exists(file_path): - raise FileNotFoundError(f"File not found: {file_path}") + msg = f"File not found: {file_path}" + raise FileNotFoundError(msg) # Files names are UUID, so we can't find the extension with open(file_path) as file: @@ -25,7 +26,8 @@ def load_file_into_dict(file_path: str) -> dict: file.seek(0) data = yaml.safe_load(file) except ValueError as exc: - raise ValueError("Invalid file type. Expected .json or .yaml.") from exc + msg = "Invalid file type. Expected .json or .yaml." + raise ValueError(msg) from exc return data diff --git a/src/backend/base/langflow/io/schema.py b/src/backend/base/langflow/io/schema.py index c0937ca62097..72342dcccc34 100644 --- a/src/backend/base/langflow/io/schema.py +++ b/src/backend/base/langflow/io/schema.py @@ -22,7 +22,8 @@ def create_input_schema(inputs: list["InputTypes"]) -> type[BaseModel]: if not isinstance(inputs, list): - raise TypeError("inputs must be a list of Inputs") + msg = "inputs must be a list of Inputs" + raise TypeError(msg) fields = {} for input_model in inputs: # Create a Pydantic Field for each input field @@ -41,7 +42,8 @@ def create_input_schema(inputs: list["InputTypes"]) -> type[BaseModel]: elif input_model.display_name: name = input_model.display_name else: - raise ValueError("Input name or display_name is required") + msg = "Input name or display_name is required" + raise ValueError(msg) field_dict = { "title": name, "description": input_model.info or "", diff --git a/src/backend/base/langflow/load/load.py b/src/backend/base/langflow/load/load.py index b362deb6917e..ebf0a1951f6a 100644 --- a/src/backend/base/langflow/load/load.py +++ b/src/backend/base/langflow/load/load.py @@ -59,7 +59,8 @@ def load_flow_from_json( elif isinstance(flow, dict): flow_graph = flow else: - raise TypeError("Input must be either a file path (str) or a JSON object (dict)") + msg = "Input must be either a file path (str) or a JSON object (dict)" + raise TypeError(msg) graph_data = flow_graph["data"] if tweaks is not None: diff --git a/src/backend/base/langflow/load/utils.py b/src/backend/base/langflow/load/utils.py index 11c04c6c4d3e..db3112da9728 100644 --- a/src/backend/base/langflow/load/utils.py +++ b/src/backend/base/langflow/load/utils.py @@ -25,9 +25,11 @@ def upload(file_path, host, flow_id): if response.status_code == 200 or response.status_code == 201: return response.json() else: - 
raise Exception(f"Error uploading file: {response.status_code}") + msg = f"Error uploading file: {response.status_code}" + raise Exception(msg) except Exception as e: - raise Exception(f"Error uploading file: {e}") + msg = f"Error uploading file: {e}" + raise Exception(msg) def upload_file(file_path: str, host: str, flow_id: str, components: list[str], tweaks: dict | None = None): @@ -57,12 +59,15 @@ def upload_file(file_path: str, host: str, flow_id: str, components: list[str], if isinstance(component, str): tweaks[component] = {"path": response["file_path"]} else: - raise ValueError(f"Component ID or name must be a string. Got {type(component)}") + msg = f"Component ID or name must be a string. Got {type(component)}" + raise ValueError(msg) return tweaks else: - raise ValueError("Error uploading file") + msg = "Error uploading file" + raise ValueError(msg) except Exception as e: - raise ValueError(f"Error uploading file: {e}") + msg = f"Error uploading file: {e}" + raise ValueError(msg) def get_flow(url: str, flow_id: str): @@ -87,6 +92,8 @@ def get_flow(url: str, flow_id: str): flow = FlowBase(**json_response).model_dump() return flow else: - raise Exception(f"Error retrieving flow: {response.status_code}") + msg = f"Error retrieving flow: {response.status_code}" + raise Exception(msg) except Exception as e: - raise Exception(f"Error retrieving flow: {e}") + msg = f"Error retrieving flow: {e}" + raise Exception(msg) diff --git a/src/backend/base/langflow/main.py b/src/backend/base/langflow/main.py index 6936cf86fd56..0dc604102d44 100644 --- a/src/backend/base/langflow/main.py +++ b/src/backend/base/langflow/main.py @@ -183,7 +183,8 @@ async def flatten_query_string_lists(request: Request, call_next): settings.prometheus_enabled = True settings.prometheus_port = prome_port else: - raise ValueError(f"Invalid port number {prome_port_str}") + msg = f"Invalid port number {prome_port_str}" + raise ValueError(msg) if settings.prometheus_enabled: from prometheus_client import start_http_server # type: ignore @@ -246,7 +247,8 @@ async def custom_404_handler(request, __): path = static_files_dir / "index.html" if not path.exists(): - raise RuntimeError(f"File at path {path} does not exist.") + msg = f"File at path {path} does not exist." + raise RuntimeError(msg) return FileResponse(path) @@ -264,7 +266,8 @@ def setup_app(static_files_dir: Path | None = None, backend_only: bool = False) static_files_dir = get_static_files_dir() if not backend_only and (not static_files_dir or not static_files_dir.exists()): - raise RuntimeError(f"Static files directory {static_files_dir} does not exist.") + msg = f"Static files directory {static_files_dir} does not exist." + raise RuntimeError(msg) app = create_app() if not backend_only and static_files_dir is not None: setup_static_files(app, static_files_dir) diff --git a/src/backend/base/langflow/memory.py b/src/backend/base/langflow/memory.py index 40c0f8eda2a1..ac13343ae990 100644 --- a/src/backend/base/langflow/memory.py +++ b/src/backend/base/langflow/memory.py @@ -70,7 +70,8 @@ def add_messages(messages: Message | list[Message], flow_id: str | None = None): if not all(isinstance(message, Message) for message in messages): types = ", ".join([str(type(message)) for message in messages]) - raise ValueError(f"The messages must be instances of Message. Found: {types}") + msg = f"The messages must be instances of Message. 
Found: {types}" + raise ValueError(msg) messages_models: list[MessageTable] = [] for msg in messages: @@ -133,7 +134,8 @@ def store_message( return [] if not message.session_id or not message.sender or not message.sender_name: - raise ValueError("All of session_id, sender, and sender_name must be provided.") + msg = "All of session_id, sender, and sender_name must be provided." + raise ValueError(msg) return add_messages([message], flow_id=flow_id) diff --git a/src/backend/base/langflow/processing/process.py b/src/backend/base/langflow/processing/process.py index d681889b7c35..5682f7bffb23 100644 --- a/src/backend/base/langflow/processing/process.py +++ b/src/backend/base/langflow/processing/process.py @@ -116,12 +116,14 @@ def validate_input( graph_data: dict[str, Any], tweaks: Union["Tweaks", dict[str, str | dict[str, Any]]] ) -> list[dict[str, Any]]: if not isinstance(graph_data, dict) or not isinstance(tweaks, dict): - raise ValueError("graph_data and tweaks should be dictionaries") + msg = "graph_data and tweaks should be dictionaries" + raise ValueError(msg) nodes = graph_data.get("data", {}).get("nodes") or graph_data.get("nodes") if not isinstance(nodes, list): - raise ValueError("graph_data should contain a list of nodes under 'data' key or directly under 'nodes' key") + msg = "graph_data should contain a list of nodes under 'data' key or directly under 'nodes' key" + raise ValueError(msg) return nodes diff --git a/src/backend/base/langflow/schema/data.py b/src/backend/base/langflow/schema/data.py index 605a36275999..e05ac1a22ed5 100644 --- a/src/backend/base/langflow/schema/data.py +++ b/src/backend/base/langflow/schema/data.py @@ -27,7 +27,8 @@ class Data(BaseModel): @classmethod def validate_data(cls, values): if not isinstance(values, dict): - raise ValueError("Data must be a dictionary") + msg = "Data must be a dictionary" + raise ValueError(msg) if not values.get("data"): values["data"] = {} # Any other keyword should be added to the data dictionary @@ -130,7 +131,8 @@ def to_lc_message( # But first we check if all required keys are present in the data dictionary # they are: "text", "sender" if not all(key in self.data for key in ["text", "sender"]): - raise ValueError(f"Missing required keys ('text', 'sender') in Data: {self.data}") + msg = f"Missing required keys ('text', 'sender') in Data: {self.data}" + raise ValueError(msg) sender = self.data.get("sender", MESSAGE_SENDER_AI) text = self.data.get("text", "") files = self.data.get("files", []) @@ -165,7 +167,8 @@ def __getattr__(self, key): return self.data[key] except KeyError: # Fallback to default behavior to raise AttributeError for undefined attributes - raise AttributeError(f"'{type(self).__name__}' object has no attribute '{key}'") + msg = f"'{type(self).__name__}' object has no attribute '{key}'" + raise AttributeError(msg) def __setattr__(self, key, value): """ diff --git a/src/backend/base/langflow/schema/dotdict.py b/src/backend/base/langflow/schema/dotdict.py index 35a72ea01fc0..5941cbd3d582 100644 --- a/src/backend/base/langflow/schema/dotdict.py +++ b/src/backend/base/langflow/schema/dotdict.py @@ -30,7 +30,8 @@ def __getattr__(self, attr): self[attr] = value # Update self to nest dotdict for future accesses return value except KeyError: - raise AttributeError(f"'dotdict' object has no attribute '{attr}'") + msg = f"'dotdict' object has no attribute '{attr}'" + raise AttributeError(msg) def __setattr__(self, key, value): """ @@ -57,7 +58,8 @@ def __delattr__(self, key): try: del self[key] except KeyError: - 
raise AttributeError(f"'dotdict' object has no attribute '{key}'") + msg = f"'dotdict' object has no attribute '{key}'" + raise AttributeError(msg) def __missing__(self, key): """ diff --git a/src/backend/base/langflow/schema/image.py b/src/backend/base/langflow/schema/image.py index 59c159c8e7f8..ecdabf01e951 100644 --- a/src/backend/base/langflow/schema/image.py +++ b/src/backend/base/langflow/schema/image.py @@ -51,7 +51,8 @@ def to_base64(self): if self.path: files = get_files([self.path], convert_to_base64=True) return files[0] - raise ValueError("Image path is not set.") + msg = "Image path is not set." + raise ValueError(msg) def to_content_dict(self): return { diff --git a/src/backend/base/langflow/schema/message.py b/src/backend/base/langflow/schema/message.py index 48c2be6e5e3d..3b28e47d83fe 100644 --- a/src/backend/base/langflow/schema/message.py +++ b/src/backend/base/langflow/schema/message.py @@ -32,7 +32,8 @@ def _timestamp_to_str(timestamp: datetime | str) -> str: datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S") return timestamp except ValueError: - raise ValueError(f"Invalid timestamp: {timestamp}") + msg = f"Invalid timestamp: {timestamp}" + raise ValueError(msg) return timestamp.strftime("%Y-%m-%d %H:%M:%S") @@ -186,7 +187,8 @@ async def get_file_content_dicts(self): def load_lc_prompt(self): if "prompt" not in self: - raise ValueError("Prompt is required.") + msg = "Prompt is required." + raise ValueError(msg) # self.prompt was passed through jsonable_encoder # so inner messages are not BaseMessage # we need to convert them to BaseMessage @@ -268,7 +270,8 @@ def json(self, **kwargs): def custom_encoder(obj): if isinstance(obj, datetime): return obj.isoformat() - raise TypeError(f"Object of type {obj.__class__.__name__} is not JSON serializable") + msg = f"Object of type {obj.__class__.__name__} is not JSON serializable" + raise TypeError(msg) class MessageResponse(DefaultModel): @@ -305,7 +308,8 @@ def serialize_files(cls, v): def from_message(cls, message: Message, flow_id: str | None = None): # first check if the record has all the required fields if message.text is None or not message.sender or not message.sender_name: - raise ValueError("The message does not have the required fields (text, sender, sender_name).") + msg = "The message does not have the required fields (text, sender, sender_name)." 
+ raise ValueError(msg) return cls( sender=message.sender, sender_name=message.sender_name, diff --git a/src/backend/base/langflow/schema/table.py b/src/backend/base/langflow/schema/table.py index 2754406f4eed..8f265647b095 100644 --- a/src/backend/base/langflow/schema/table.py +++ b/src/backend/base/langflow/schema/table.py @@ -23,7 +23,8 @@ def validate_formatter(cls, value): return FormatterType(value) if isinstance(value, FormatterType): return value - raise ValueError("Invalid formatter type") + msg = "Invalid formatter type" + raise ValueError(msg) class TableSchema(BaseModel): diff --git a/src/backend/base/langflow/services/auth/utils.py b/src/backend/base/langflow/services/auth/utils.py index ca963ade54ad..98042cae4045 100644 --- a/src/backend/base/langflow/services/auth/utils.py +++ b/src/backend/base/langflow/services/auth/utils.py @@ -67,7 +67,8 @@ async def api_key_security( return UserRead.model_validate(result.user, from_attributes=True) elif isinstance(result, User): return UserRead.model_validate(result, from_attributes=True) - raise ValueError("Invalid result type") + msg = "Invalid result type" + raise ValueError(msg) async def get_current_user( diff --git a/src/backend/base/langflow/services/cache/factory.py b/src/backend/base/langflow/services/cache/factory.py index c02ac1797733..3d7887f9c137 100644 --- a/src/backend/base/langflow/services/cache/factory.py +++ b/src/backend/base/langflow/services/cache/factory.py @@ -31,7 +31,8 @@ def create(self, settings_service: "SettingsService"): return redis_cache else: # do not attempt to fallback to another cache type - raise ConnectionError("Failed to connect to Redis cache") + msg = "Failed to connect to Redis cache" + raise ConnectionError(msg) elif settings_service.settings.cache_type == "memory": return ThreadingInMemoryCache(expiration_time=settings_service.settings.cache_expire) diff --git a/src/backend/base/langflow/services/cache/service.py b/src/backend/base/langflow/services/cache/service.py index 2511b76dc4f0..3a3056c052c2 100644 --- a/src/backend/base/langflow/services/cache/service.py +++ b/src/backend/base/langflow/services/cache/service.py @@ -216,10 +216,11 @@ def __init__(self, host="localhost", port=6379, db=0, url=None, expiration_time= try: import redis except ImportError as exc: - raise ImportError( + msg = ( "RedisCache requires the redis-py package." " Please install Langflow with the deploy extra: pip install langflow[deploy]" - ) from exc + ) + raise ImportError(msg) from exc logger.warning( "RedisCache is an experimental feature and may not work as expected." " Please report any issues to our GitHub repository." @@ -271,9 +272,11 @@ async def set(self, key, value, lock=None): if pickled := pickle.dumps(value): result = self._client.setex(str(key), self.expiration_time, pickled) if not result: - raise ValueError("RedisCache could not set the value.") + msg = "RedisCache could not set the value." + raise ValueError(msg) except TypeError as exc: - raise TypeError("RedisCache only accepts values that can be pickled. ") from exc + msg = "RedisCache only accepts values that can be pickled. 
" + raise TypeError(msg) from exc async def upsert(self, key, value, lock=None): """ diff --git a/src/backend/base/langflow/services/cache/utils.py b/src/backend/base/langflow/services/cache/utils.py index acff20f2873f..24b461a31547 100644 --- a/src/backend/base/langflow/services/cache/utils.py +++ b/src/backend/base/langflow/services/cache/utils.py @@ -90,12 +90,14 @@ def save_binary_file(content: str, file_name: str, accepted_types: list[str]) -> The path to the saved file. """ if not any(file_name.endswith(suffix) for suffix in accepted_types): - raise ValueError(f"File {file_name} is not accepted") + msg = f"File {file_name} is not accepted" + raise ValueError(msg) # Get the destination folder cache_path = Path(CACHE_DIR) / PREFIX if not content: - raise ValueError("Please, reload the file in the loader.") + msg = "Please, reload the file in the loader." + raise ValueError(msg) data = content.split(",")[1] decoded_bytes = base64.b64decode(data) @@ -161,7 +163,8 @@ def save_uploaded_file(file: UploadFile, folder_name): def update_build_status(cache_service, flow_id: str, status: "BuildStatus"): cached_flow = cache_service[flow_id] if cached_flow is None: - raise ValueError(f"Flow {flow_id} not found in cache") + msg = f"Flow {flow_id} not found in cache" + raise ValueError(msg) cached_flow["status"] = status cache_service[flow_id] = cached_flow cached_flow["status"] = status diff --git a/src/backend/base/langflow/services/chat/cache.py b/src/backend/base/langflow/services/chat/cache.py index 83de27ae99d0..c66b3bdaa6b2 100644 --- a/src/backend/base/langflow/services/chat/cache.py +++ b/src/backend/base/langflow/services/chat/cache.py @@ -115,7 +115,8 @@ def add_pandas(self, name: str, obj: Any): if isinstance(obj, pd.DataFrame | pd.Series): self.add(name, obj.to_csv(), "pandas", extension="csv") else: - raise ValueError("Object is not a pandas DataFrame or Series") + msg = "Object is not a pandas DataFrame or Series" + raise ValueError(msg) def add_image(self, name: str, obj: Any, extension: str = "png"): """ @@ -128,7 +129,8 @@ def add_image(self, name: str, obj: Any, extension: str = "png"): if isinstance(obj, Image.Image): self.add(name, obj, "image", extension=extension) else: - raise ValueError("Object is not a PIL Image") + msg = "Object is not a PIL Image" + raise ValueError(msg) def get(self, name: str): """ diff --git a/src/backend/base/langflow/services/database/factory.py b/src/backend/base/langflow/services/database/factory.py index f9c269f12caf..6593c5c23fd7 100644 --- a/src/backend/base/langflow/services/database/factory.py +++ b/src/backend/base/langflow/services/database/factory.py @@ -14,5 +14,6 @@ def __init__(self): def create(self, settings_service: "SettingsService"): # Here you would have logic to create and configure a DatabaseService if not settings_service.settings.database_url: - raise ValueError("No database URL provided") + msg = "No database URL provided" + raise ValueError(msg) return DatabaseService(settings_service) diff --git a/src/backend/base/langflow/services/database/models/api_key/crud.py b/src/backend/base/langflow/services/database/models/api_key/crud.py index 6fa310373382..05dd14784fec 100644 --- a/src/backend/base/langflow/services/database/models/api_key/crud.py +++ b/src/backend/base/langflow/services/database/models/api_key/crud.py @@ -37,7 +37,8 @@ def create_api_key(session: Session, api_key_create: ApiKeyCreate, user_id: UUID def delete_api_key(session: Session, api_key_id: UUID) -> None: api_key = session.get(ApiKey, api_key_id) if api_key 
is None: - raise ValueError("API Key not found") + msg = "API Key not found" + raise ValueError(msg) session.delete(api_key) session.commit() @@ -65,7 +66,8 @@ def update_total_uses(session, api_key: ApiKey): with Session(session.get_bind()) as new_session: new_api_key = new_session.get(ApiKey, api_key.id) if new_api_key is None: - raise ValueError("API Key not found") + msg = "API Key not found" + raise ValueError(msg) new_api_key.total_uses += 1 new_api_key.last_used_at = datetime.datetime.now(datetime.timezone.utc) new_session.add(new_api_key) diff --git a/src/backend/base/langflow/services/database/models/flow/model.py b/src/backend/base/langflow/services/database/models/flow/model.py index 26bb80e31cba..809859d314e3 100644 --- a/src/backend/base/langflow/services/database/models/flow/model.py +++ b/src/backend/base/langflow/services/database/models/flow/model.py @@ -54,14 +54,17 @@ def validate_endpoint_name(cls, v): @field_validator("icon_bg_color") def validate_icon_bg_color(cls, v): if v is not None and not isinstance(v, str): - raise ValueError("Icon background color must be a string") + msg = "Icon background color must be a string" + raise ValueError(msg) # validate that is is a hex color if v and not v.startswith("#"): - raise ValueError("Icon background color must start with #") + msg = "Icon background color must start with #" + raise ValueError(msg) # validate that it is a valid hex color if v and len(v) != 7: - raise ValueError("Icon background color must be 7 characters long") + msg = "Icon background color must be 7 characters long" + raise ValueError(msg) return v @field_validator("icon") @@ -79,7 +82,8 @@ def validate_icon_atr(cls, v): elif not v.startswith(":") or not v.endswith(":"): # emoji should have both starting and ending colons # so if one of them is missing, we will raise - raise ValueError(f"Invalid emoji. {v} is not a valid emoji.") + msg = f"Invalid emoji. {v} is not a valid emoji." 
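
The same rewrite applies inside Pydantic validators, as in the Flow model hunks above. A self-contained sketch with an assumed model name (not the project's actual class); the message raised inside the validator surfaces through pydantic's ValidationError:

    from pydantic import BaseModel, ValidationError, field_validator

    class IconSettings(BaseModel):  # illustrative model, not langflow's Flow class
        icon_bg_color: str | None = None

        @field_validator("icon_bg_color")
        @classmethod
        def validate_icon_bg_color(cls, v):
            if v is not None and not v.startswith("#"):
                msg = "Icon background color must start with #"
                raise ValueError(msg)  # wrapped into a ValidationError by pydantic
            return v

    try:
        IconSettings(icon_bg_color="ff0000")
    except ValidationError as err:
        print(err)
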
+ raise ValueError(msg) emoji_value = emoji.emojize(v, variant="emoji_type") if v == emoji_value: @@ -92,12 +96,15 @@ def validate_icon_atr(cls, v): return icon # otherwise it should be a valid lucide icon if v is not None and not isinstance(v, str): - raise ValueError("Icon must be a string") + msg = "Icon must be a string" + raise ValueError(msg) # is should be lowercase and contain only letters and hyphens if v and not v.islower(): - raise ValueError("Icon must be lowercase") + msg = "Icon must be lowercase" + raise ValueError(msg) if v and not v.replace("-", "").isalpha(): - raise ValueError("Icon must contain only letters and hyphens") + msg = "Icon must contain only letters and hyphens" + raise ValueError(msg) return v @field_validator("data") @@ -105,13 +112,16 @@ def validate_json(v): if not v: return v if not isinstance(v, dict): - raise ValueError("Flow must be a valid JSON") + msg = "Flow must be a valid JSON" + raise ValueError(msg) # data must contain nodes and edges if "nodes" not in v.keys(): - raise ValueError("Flow must have nodes") + msg = "Flow must have nodes" + raise ValueError(msg) if "edges" not in v.keys(): - raise ValueError("Flow must have edges") + msg = "Flow must have edges" + raise ValueError(msg) return v diff --git a/src/backend/base/langflow/services/database/models/flow/utils.py b/src/backend/base/langflow/services/database/models/flow/utils.py index 4bfe246e7344..54f085d9f4c3 100644 --- a/src/backend/base/langflow/services/database/models/flow/utils.py +++ b/src/backend/base/langflow/services/database/models/flow/utils.py @@ -11,7 +11,8 @@ def get_flow_by_id(session: Session = Depends(get_session), flow_id: str | None """Get flow by id.""" if flow_id is None: - raise ValueError("Flow id is required.") + msg = "Flow id is required." + raise ValueError(msg) return session.get(Flow, flow_id) diff --git a/src/backend/base/langflow/services/database/models/message/crud.py b/src/backend/base/langflow/services/database/models/message/crud.py index 05c566fe9af5..9b408da809b1 100644 --- a/src/backend/base/langflow/services/database/models/message/crud.py +++ b/src/backend/base/langflow/services/database/models/message/crud.py @@ -10,7 +10,8 @@ def update_message(message_id: UUID, message: MessageUpdate | dict): with session_scope() as session: db_message = session.get(MessageTable, message_id) if not db_message: - raise ValueError("Message not found") + msg = "Message not found" + raise ValueError(msg) message_dict = message.model_dump(exclude_unset=True, exclude_none=True) db_message.sqlmodel_update(message_dict) session.add(db_message) diff --git a/src/backend/base/langflow/services/database/models/message/model.py b/src/backend/base/langflow/services/database/models/message/model.py index d6e027885d6b..8fbbfe057295 100644 --- a/src/backend/base/langflow/services/database/models/message/model.py +++ b/src/backend/base/langflow/services/database/models/message/model.py @@ -29,7 +29,8 @@ def validate_files(cls, value): def from_message(cls, message: "Message", flow_id: str | UUID | None = None): # first check if the record has all the required fields if message.text is None or not message.sender or not message.sender_name: - raise ValueError("The message does not have the required fields (text, sender, sender_name).") + msg = "The message does not have the required fields (text, sender, sender_name)." 
+ raise ValueError(msg) if message.files: image_paths = [] for file in message.files: diff --git a/src/backend/base/langflow/services/database/service.py b/src/backend/base/langflow/services/database/service.py index 77416639d96d..1e0a5d52cd2d 100644 --- a/src/backend/base/langflow/services/database/service.py +++ b/src/backend/base/langflow/services/database/service.py @@ -35,7 +35,8 @@ class DatabaseService(Service): def __init__(self, settings_service: "SettingsService"): self.settings_service = settings_service if settings_service.settings.database_url is None: - raise ValueError("No database URL provided") + msg = "No database URL provided" + raise ValueError(msg) self.database_url: str = settings_service.settings.database_url # This file is in langflow.services.database.manager.py # the ini is in langflow @@ -75,7 +76,8 @@ def _create_engine(self) -> "Engine": "To avoid this warning, update the database URL." ) return self._create_engine() - raise RuntimeError("Error creating database engine") from exc + msg = "Error creating database engine" + raise RuntimeError(msg) from exc def on_connection(self, dbapi_connection, connection_record): from sqlite3 import Connection as sqliteConnection @@ -116,7 +118,8 @@ def migrate_flows_if_auto_login(self): user = get_user_by_username(session, username) if not user: logger.error("Default superuser not found") - raise RuntimeError("Default superuser not found") + msg = "Default superuser not found" + raise RuntimeError(msg) for flow in flows: flow.user_id = user.id session.commit() @@ -193,7 +196,8 @@ def run_migrations(self, fix=False): self.init_alembic(alembic_cfg) except Exception as exc: logger.error(f"Error initializing alembic: {exc}") - raise RuntimeError("Error initializing alembic") from exc + msg = "Error initializing alembic" + raise RuntimeError(msg) from exc else: logger.info("Alembic already initialized") @@ -213,7 +217,8 @@ def run_migrations(self, fix=False): except util.exc.AutogenerateDiffsDetected as exc: logger.error(f"AutogenerateDiffsDetected: {exc}") if not fix: - raise RuntimeError(f"There's a mismatch between the models and the database.\n{exc}") + msg = f"There's a mismatch between the models and the database.\n{exc}" + raise RuntimeError(msg) if fix: self.try_downgrade_upgrade_until_success(alembic_cfg) @@ -284,7 +289,8 @@ def create_db_and_tables(self): logger.warning(f"Table {table} already exists, skipping. Exception: {oe}") except Exception as exc: logger.error(f"Error creating table {table}: {exc}") - raise RuntimeError(f"Error creating table {table}") from exc + msg = f"Error creating table {table}" + raise RuntimeError(msg) from exc # Now check if the required tables exist, if not, something went wrong. inspector = inspect(self.engine) @@ -293,7 +299,8 @@ def create_db_and_tables(self): if table not in table_names: logger.error("Something went wrong creating the database and tables.") logger.error("Please check your database settings.") - raise RuntimeError("Something went wrong creating the database and tables.") + msg = "Something went wrong creating the database and tables." 
+ raise RuntimeError(msg) logger.debug("Database and tables created successfully") diff --git a/src/backend/base/langflow/services/database/utils.py b/src/backend/base/langflow/services/database/utils.py index 47eeb6b8ff90..f02b37f3ddcd 100644 --- a/src/backend/base/langflow/services/database/utils.py +++ b/src/backend/base/langflow/services/database/utils.py @@ -24,12 +24,14 @@ def initialize_database(fix_migration: bool = False): # we can ignore it if "already exists" not in str(exc): logger.error(f"Error creating DB and tables: {exc}") - raise RuntimeError("Error creating DB and tables") from exc + msg = "Error creating DB and tables" + raise RuntimeError(msg) from exc try: database_service.check_schema_health() except Exception as exc: logger.error(f"Error checking schema health: {exc}") - raise RuntimeError("Error checking schema health") from exc + msg = "Error checking schema health" + raise RuntimeError(msg) from exc try: database_service.run_migrations(fix=fix_migration) except CommandError as exc: diff --git a/src/backend/base/langflow/services/factory.py b/src/backend/base/langflow/services/factory.py index 41182924b34a..86db0daa744b 100644 --- a/src/backend/base/langflow/services/factory.py +++ b/src/backend/base/langflow/services/factory.py @@ -55,7 +55,8 @@ def infer_service_types(factory_class: type[ServiceFactory], available_services= service_type = ServiceType[type_name] service_types.append(service_type) except KeyError: - raise ValueError(f"No matching ServiceType for parameter type: {param_type.__name__}") + msg = f"No matching ServiceType for parameter type: {param_type.__name__}" + raise ValueError(msg) return service_types @@ -79,5 +80,6 @@ def import_all_services_into_a_dict(): break except Exception as exc: logger.exception(exc) - raise RuntimeError("Could not initialize services. Please check your settings.") from exc + msg = "Could not initialize services. Please check your settings." + raise RuntimeError(msg) from exc return services diff --git a/src/backend/base/langflow/services/manager.py b/src/backend/base/langflow/services/manager.py index 3fe82ecc56a1..401bd4b0db42 100644 --- a/src/backend/base/langflow/services/manager.py +++ b/src/backend/base/langflow/services/manager.py @@ -88,7 +88,8 @@ def _validate_service_creation(self, service_name: ServiceType, default: Service Validate whether the service can be created. """ if service_name not in self.factories and default is None: - raise NoFactoryRegisteredError(f"No factory registered for the service class '{service_name.name}'") + msg = f"No factory registered for the service class '{service_name.name}'" + raise NoFactoryRegisteredError(msg) def update(self, service_name: ServiceType): """ @@ -138,9 +139,8 @@ def get_factories(): except Exception as exc: logger.exception(exc) - raise RuntimeError( - f"Could not initialize services. Please check your settings. Error in {name}." - ) from exc + msg = f"Could not initialize services. Please check your settings. Error in {name}." 
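
As the database-service and service-factory hunks above show, the pattern composes with exception chaining unchanged: only the message moves into a variable, and `from exc` still records the original cause. A runnable sketch with a hypothetical stand-in helper (not the project's real initialization code):

    def init_alembic(cfg) -> None:
        # Stand-in for the real initialization step; always fails here.
        raise OSError("simulated failure")

    def run_migrations(cfg) -> None:
        try:
            init_alembic(cfg)
        except Exception as exc:
            msg = "Error initializing alembic"
            raise RuntimeError(msg) from exc  # __cause__ still points at the OSError

    try:
        run_migrations(object())
    except RuntimeError as err:
        assert isinstance(err.__cause__, OSError)
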
+ raise RuntimeError(msg) from exc return factories diff --git a/src/backend/base/langflow/services/settings/base.py b/src/backend/base/langflow/services/settings/base.py index b96530edbc40..388ca8d90cf7 100644 --- a/src/backend/base/langflow/services/settings/base.py +++ b/src/backend/base/langflow/services/settings/base.py @@ -215,7 +215,8 @@ def set_database_url(cls, value, info): # so we need to migrate to the new format # if there is a database in that location if not info.data["config_dir"]: - raise ValueError("config_dir not set, please set it or provide a database_url") + msg = "config_dir not set, please set it or provide a database_url" + raise ValueError(msg) from langflow.utils.version import get_version_info from langflow.utils.version import is_pre_release as langflow_is_pre_release @@ -370,7 +371,8 @@ def load_settings_from_yaml(file_path: str) -> Settings: for key in settings_dict: if key not in Settings.model_fields.keys(): - raise KeyError(f"Key {key} not found in settings") + msg = f"Key {key} not found in settings" + raise KeyError(msg) logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}") return Settings(**settings_dict) diff --git a/src/backend/base/langflow/services/settings/manager.py b/src/backend/base/langflow/services/settings/manager.py index fc127a5ea458..cbf0d1cc60b0 100644 --- a/src/backend/base/langflow/services/settings/manager.py +++ b/src/backend/base/langflow/services/settings/manager.py @@ -31,12 +31,14 @@ def load_settings_from_yaml(cls, file_path: str) -> "SettingsService": for key in settings_dict: if key not in Settings.model_fields.keys(): - raise KeyError(f"Key {key} not found in settings") + msg = f"Key {key} not found in settings" + raise KeyError(msg) logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}") settings = Settings(**settings_dict) if not settings.config_dir: - raise ValueError("CONFIG_DIR must be set in settings") + msg = "CONFIG_DIR must be set in settings" + raise ValueError(msg) auth_settings = AuthSettings( CONFIG_DIR=settings.config_dir, diff --git a/src/backend/base/langflow/services/settings/service.py b/src/backend/base/langflow/services/settings/service.py index b83b47387a3e..ebcaae0446f3 100644 --- a/src/backend/base/langflow/services/settings/service.py +++ b/src/backend/base/langflow/services/settings/service.py @@ -17,7 +17,8 @@ def initialize(cls) -> "SettingsService": settings = Settings() if not settings.config_dir: - raise ValueError("CONFIG_DIR must be set in settings") + msg = "CONFIG_DIR must be set in settings" + raise ValueError(msg) auth_settings = AuthSettings( CONFIG_DIR=settings.config_dir, diff --git a/src/backend/base/langflow/services/storage/local.py b/src/backend/base/langflow/services/storage/local.py index 1044a76fe278..7a33d88b1b12 100644 --- a/src/backend/base/langflow/services/storage/local.py +++ b/src/backend/base/langflow/services/storage/local.py @@ -57,7 +57,8 @@ async def get_file(self, flow_id: str, file_name: str) -> bytes: file_path = self.data_dir / flow_id / file_name if not file_path.exists(): logger.warning(f"File {file_name} not found in flow {flow_id}.") - raise FileNotFoundError(f"File {file_name} not found in flow {flow_id}") + msg = f"File {file_name} not found in flow {flow_id}" + raise FileNotFoundError(msg) def read_file(file_path: Path) -> bytes: with open(file_path, "rb") as f: @@ -78,7 +79,8 @@ async def list_files(self, flow_id: str): folder_path = self.data_dir / flow_id if not folder_path.exists() or not folder_path.is_dir(): 
logger.warning(f"Flow {flow_id} directory does not exist.") - raise FileNotFoundError(f"Flow {flow_id} directory does not exist.") + msg = f"Flow {flow_id} directory does not exist." + raise FileNotFoundError(msg) files = [file.name for file in folder_path.iterdir() if file.is_file()] logger.info(f"Listed {len(files)} files in flow {flow_id}.") diff --git a/src/backend/base/langflow/services/store/service.py b/src/backend/base/langflow/services/store/service.py index 0d9a3283632c..1afb575abddd 100644 --- a/src/backend/base/langflow/services/store/service.py +++ b/src/backend/base/langflow/services/store/service.py @@ -41,7 +41,8 @@ async def user_data_context(store_service: "StoreService", api_key: str | None = user_data_var.set(user_data[0]) except HTTPStatusError as exc: if exc.response.status_code == 403: - raise ValueError("Invalid API key") + msg = "Invalid API key" + raise ValueError(msg) try: yield finally: @@ -116,9 +117,11 @@ async def check_api_key(self, api_key: str): if exc.response.status_code in [403, 401]: return False else: - raise ValueError(f"Unexpected status code: {exc.response.status_code}") + msg = f"Unexpected status code: {exc.response.status_code}" + raise ValueError(msg) except Exception as exc: - raise ValueError(f"Unexpected error: {exc}") + msg = f"Unexpected error: {exc}" + raise ValueError(msg) async def _get( self, url: str, api_key: str | None = None, params: dict[str, Any] | None = None @@ -135,7 +138,8 @@ async def _get( except HTTPError as exc: raise exc except Exception as exc: - raise ValueError(f"GET failed: {exc}") + msg = f"GET failed: {exc}" + raise ValueError(msg) json_response = response.json() result = json_response["data"] metadata = {} @@ -228,15 +232,18 @@ def build_filter_conditions( liked_filter = self.build_liked_filter() filter_conditions.append(liked_filter) elif liked and not store_api_key: - raise APIKeyError("You must provide an API key to filter by likes") + msg = "You must provide an API key to filter by likes" + raise APIKeyError(msg) if filter_by_user and store_api_key: user_data = user_data_var.get() if not user_data: - raise ValueError("No user data") + msg = "No user data" + raise ValueError(msg) filter_conditions.append({"user_created": {"_eq": user_data["id"]}}) elif filter_by_user and not store_api_key: - raise APIKeyError("You must provide an API key to filter your components") + msg = "You must provide an API key to filter your components" + raise APIKeyError(msg) else: filter_conditions.append({"private": {"_eq": False}}) @@ -246,7 +253,8 @@ def build_liked_filter(self): user_data = user_data_var.get() # params["filter"] = json.dumps({"user_created": {"_eq": user_data["id"]}}) if not user_data: - raise ValueError("No user data") + msg = "No user data" + raise ValueError(msg) return {"liked_by": {"directus_users_id": {"_eq": user_data["id"]}}} async def query_components( @@ -294,7 +302,8 @@ async def get_liked_by_user_components(self, component_ids: list[str], api_key: # return the ids user_data = user_data_var.get() if not user_data: - raise ValueError("No user data") + msg = "No user data" + raise ValueError(msg) params = { "fields": "id", "filter": json.dumps( @@ -313,7 +322,8 @@ async def get_liked_by_user_components(self, component_ids: list[str], api_key: async def get_components_in_users_collection(self, component_ids: list[str], api_key: str): user_data = user_data_var.get() if not user_data: - raise ValueError("No user data") + msg = "No user data" + raise ValueError(msg) params = { "fields": "id", "filter": 
json.dumps( @@ -332,11 +342,13 @@ async def download(self, api_key: str, component_id: UUID) -> DownloadComponentR url = f"{self.components_url}/{component_id}" params = {"fields": "id,name,description,data,is_component,metadata"} if not self.download_webhook_url: - raise ValueError("DOWNLOAD_WEBHOOK_URL is not set") + msg = "DOWNLOAD_WEBHOOK_URL is not set" + raise ValueError(msg) component, _ = await self._get(url, api_key, params) await self.call_webhook(api_key, self.download_webhook_url, component_id) if len(component) > 1: - raise ValueError("Something went wrong while downloading the component") + msg = "Something went wrong while downloading the component" + raise ValueError(msg) component_dict = component[0] download_component = DownloadComponentResponse(**component_dict) @@ -346,7 +358,8 @@ async def download(self, api_key: str, component_id: UUID) -> DownloadComponentR try: download_component.metadata = process_component_data(download_component.data.get("nodes", [])) except KeyError: - raise ValueError("Invalid component data. No nodes found") + msg = "Invalid component data. No nodes found" + raise ValueError(msg) return download_component async def upload(self, api_key: str, component_data: StoreComponentCreate) -> CreateComponentResponse: @@ -380,7 +393,8 @@ async def upload(self, api_key: str, component_data: StoreComponentCreate) -> Cr raise FilterError(message) except UnboundLocalError: pass - raise ValueError(f"Upload failed: {exc}") + msg = f"Upload failed: {exc}" + raise ValueError(msg) async def update( self, api_key: str, component_id: UUID, component_data: StoreComponentCreate @@ -416,7 +430,8 @@ async def update( raise FilterError(message) except UnboundLocalError: pass - raise ValueError(f"Upload failed: {exc}") + msg = f"Upload failed: {exc}" + raise ValueError(msg) async def get_tags(self) -> list[dict[str, Any]]: url = f"{self.base_url}/items/tags" @@ -440,21 +455,24 @@ async def get_component_likes_count(self, component_id: str, api_key: str | None } result, _ = await self._get(url, api_key=api_key, params=params) if len(result) == 0: - raise ValueError("Component not found") + msg = "Component not found" + raise ValueError(msg) likes = result[0]["liked_by_count"] # likes_by_count is a string # try to convert it to int try: likes = int(likes) except ValueError: - raise ValueError(f"Unexpected value for likes count: {likes}") + msg = f"Unexpected value for likes count: {likes}" + raise ValueError(msg) return likes async def like_component(self, api_key: str, component_id: str) -> bool: # if it returns a list with one id, it means the like was successful # if it returns an int, it means the like was removed if not self.like_webhook_url: - raise ValueError("LIKE_WEBHOOK_URL is not set") + msg = "LIKE_WEBHOOK_URL is not set" + raise ValueError(msg) headers = {"Authorization": f"Bearer {api_key}"} # response = httpx.post( # self.like_webhook_url, @@ -479,9 +497,11 @@ async def like_component(self, api_key: str, component_id: str) -> bool: elif isinstance(result, int): return False else: - raise ValueError(f"Unexpected result: {result}") + msg = f"Unexpected result: {result}" + raise ValueError(msg) else: - raise ValueError(f"Unexpected status code: {response.status_code}") + msg = f"Unexpected status code: {response.status_code}" + raise ValueError(msg) async def get_list_component_response_model( self, @@ -528,13 +548,14 @@ async def get_list_component_response_model( comp_count = metadata.get("filter_count", 0) except HTTPStatusError as exc: if 
exc.response.status_code == 403: - raise ForbiddenError("You are not authorized to access this public resource") from exc + msg = "You are not authorized to access this public resource" + raise ForbiddenError(msg) from exc elif exc.response.status_code == 401: - raise APIKeyError( - "You are not authorized to access this resource. Please check your API key." - ) from exc + msg = "You are not authorized to access this resource. Please check your API key." + raise APIKeyError(msg) from exc except Exception as exc: - raise ValueError(f"Unexpected error: {exc}") from exc + msg = f"Unexpected error: {exc}" + raise ValueError(msg) from exc try: if result and not metadata: if len(result) >= limit: @@ -549,9 +570,11 @@ async def get_list_component_response_model( comp_count = 0 except HTTPStatusError as exc: if exc.response.status_code == 403: - raise ForbiddenError("You are not authorized to access this public resource") + msg = "You are not authorized to access this public resource" + raise ForbiddenError(msg) elif exc.response.status_code == 401: - raise APIKeyError("You are not authorized to access this resource. Please check your API key.") + msg = "You are not authorized to access this resource. Please check your API key." + raise APIKeyError(msg) if store_api_key: # Now, from the result, we need to get the components diff --git a/src/backend/base/langflow/services/task/backends/celery.py b/src/backend/base/langflow/services/task/backends/celery.py index 45a3ed7b0b6b..e2ae1e86b0b8 100644 --- a/src/backend/base/langflow/services/task/backends/celery.py +++ b/src/backend/base/langflow/services/task/backends/celery.py @@ -18,7 +18,8 @@ def launch_task(self, task_func: Callable[..., Any], *args: Any, **kwargs: Any) from celery import Task # type: ignore if not hasattr(task_func, "delay"): - raise ValueError(f"Task function {task_func} does not have a delay method") + msg = f"Task function {task_func} does not have a delay method" + raise ValueError(msg) task: Task = task_func.delay(*args, **kwargs) return task.id, AsyncResult(task.id, app=self.celery_app) diff --git a/src/backend/base/langflow/services/task/service.py b/src/backend/base/langflow/services/task/service.py index 321ac9c742d2..3fca26e55479 100644 --- a/src/backend/base/langflow/services/task/service.py +++ b/src/backend/base/langflow/services/task/service.py @@ -66,7 +66,8 @@ async def launch_and_await_task( if not self.use_celery: return None, await task_func(*args, **kwargs) if not hasattr(task_func, "apply"): - raise ValueError(f"Task function {task_func} does not have an apply method") + msg = f"Task function {task_func} does not have an apply method" + raise ValueError(msg) task = task_func.apply(args=args, kwargs=kwargs) result = task.get() diff --git a/src/backend/base/langflow/services/telemetry/opentelemetry.py b/src/backend/base/langflow/services/telemetry/opentelemetry.py index 943d6d10ccc7..95465b2c25bb 100644 --- a/src/backend/base/langflow/services/telemetry/opentelemetry.py +++ b/src/backend/base/langflow/services/telemetry/opentelemetry.py @@ -81,11 +81,13 @@ def validate_labels(self, labels: Mapping[str, str]): Validate if the labels provided are valid """ if labels is None or len(labels) == 0: - raise ValueError("Labels must be provided for the metric") + msg = "Labels must be provided for the metric" + raise ValueError(msg) missing_labels = set(self.mandatory_labels) - set(labels.keys()) if missing_labels: - raise ValueError(f"Missing required labels: {missing_labels}") + msg = f"Missing required labels: 
{missing_labels}" + raise ValueError(msg) def __repr__(self): return f"Metric(name='{self.name}', description='{self.description}', type={self.type}, unit='{self.unit}')" @@ -115,7 +117,8 @@ def _add_metric(self, name: str, description: str, unit: str, metric_type: Metri metric = Metric(name=name, description=description, type=metric_type, unit=unit, labels=labels) self._metrics_registry[name] = metric if labels is None or len(labels) == 0: - raise ValueError("Labels must be provided for the metric upon registration") + msg = "Labels must be provided for the metric upon registration" + raise ValueError(msg) def _register_metric(self): """ @@ -156,7 +159,8 @@ def __init__(self, prometheus_enabled: bool = True): for name, metric in self._metrics_registry.items(): if name != metric.name: - raise ValueError(f"Key '{name}' does not match metric name '{metric.name}'") + msg = f"Key '{name}' does not match metric name '{metric.name}'" + raise ValueError(msg) with warnings.catch_warnings(): warnings.simplefilter("ignore") @@ -188,12 +192,14 @@ def _create_metric(self, metric): description=metric.description, ) else: - raise ValueError(f"Unknown metric type: {metric.type}") + msg = f"Unknown metric type: {metric.type}" + raise ValueError(msg) def validate_labels(self, metric_name: str, labels: Mapping[str, str]): reg = self._metrics_registry.get(metric_name) if reg is None: - raise ValueError(f"Metric '{metric_name}' is not registered") + msg = f"Metric '{metric_name}' is not registered" + raise ValueError(msg) reg.validate_labels(labels) def increment_counter(self, metric_name: str, labels: Mapping[str, str], value: float = 1.0): @@ -202,7 +208,8 @@ def increment_counter(self, metric_name: str, labels: Mapping[str, str], value: if isinstance(counter, Counter): counter.add(value, labels) else: - raise ValueError(f"Metric '{metric_name}' is not a counter") + msg = f"Metric '{metric_name}' is not a counter" + raise ValueError(msg) def up_down_counter(self, metric_name: str, value: float, labels: Mapping[str, str]): self.validate_labels(metric_name, labels) @@ -210,7 +217,8 @@ def up_down_counter(self, metric_name: str, value: float, labels: Mapping[str, s if isinstance(up_down_counter, UpDownCounter): up_down_counter.add(value, labels) else: - raise ValueError(f"Metric '{metric_name}' is not an up down counter") + msg = f"Metric '{metric_name}' is not an up down counter" + raise ValueError(msg) def update_gauge(self, metric_name: str, value: float, labels: Mapping[str, str]): self.validate_labels(metric_name, labels) @@ -218,7 +226,8 @@ def update_gauge(self, metric_name: str, value: float, labels: Mapping[str, str] if isinstance(gauge, ObservableGaugeWrapper): gauge.set_value(value, labels) else: - raise ValueError(f"Metric '{metric_name}' is not a gauge") + msg = f"Metric '{metric_name}' is not a gauge" + raise ValueError(msg) def observe_histogram(self, metric_name: str, value: float, labels: Mapping[str, str]): self.validate_labels(metric_name, labels) @@ -226,4 +235,5 @@ def observe_histogram(self, metric_name: str, value: float, labels: Mapping[str, if isinstance(histogram, Histogram): histogram.record(value, labels) else: - raise ValueError(f"Metric '{metric_name}' is not a histogram") + msg = f"Metric '{metric_name}' is not a histogram" + raise ValueError(msg) diff --git a/src/backend/base/langflow/services/tracing/langfuse.py b/src/backend/base/langflow/services/tracing/langfuse.py index 6f8e032f3aeb..85293d5a4c2e 100644 --- a/src/backend/base/langflow/services/tracing/langfuse.py +++ 
b/src/backend/base/langflow/services/tracing/langfuse.py @@ -38,7 +38,8 @@ def setup_langfuse(self) -> bool: config = self._get_config() if not all(config.values()): - raise ValueError("Missing Langfuse configuration") + msg = "Missing Langfuse configuration" + raise ValueError(msg) self._client = Langfuse(**config) self.trace = self._client.trace(id=str(self.trace_id), name=self.flow_id) diff --git a/src/backend/base/langflow/services/utils.py b/src/backend/base/langflow/services/utils.py index 6c3e6e7b1cfd..96ec30fce918 100644 --- a/src/backend/base/langflow/services/utils.py +++ b/src/backend/base/langflow/services/utils.py @@ -41,9 +41,11 @@ def get_or_create_super_user(session: Session, username, password, is_default): if user: if verify_password(password, user.password): - raise ValueError("User with superuser credentials exists but is not a superuser.") + msg = "User with superuser credentials exists but is not a superuser." + raise ValueError(msg) else: - raise ValueError("Incorrect superuser credentials") + msg = "Incorrect superuser credentials" + raise ValueError(msg) if is_default: logger.debug("Creating default superuser.") @@ -78,7 +80,8 @@ def setup_superuser(settings_service, session: Session): logger.debug("Superuser created successfully.") except Exception as exc: logger.exception(exc) - raise RuntimeError("Could not create superuser. Please create a superuser manually.") from exc + msg = "Could not create superuser. Please create a superuser manually." + raise RuntimeError(msg) from exc finally: settings_service.auth_settings.reset_credentials() @@ -107,7 +110,8 @@ def teardown_superuser(settings_service, session): except Exception as exc: logger.exception(exc) session.rollback() - raise RuntimeError("Could not remove default superuser.") from exc + msg = "Could not remove default superuser." + raise RuntimeError(msg) from exc async def teardown_services(): @@ -171,4 +175,5 @@ def initialize_services(fix_migration: bool = False, socketio_server=None): get_db_service().migrate_flows_if_auto_login() except Exception as exc: logger.error(f"Error migrating flows: {exc}") - raise RuntimeError("Error migrating flows") from exc + msg = "Error migrating flows" + raise RuntimeError(msg) from exc diff --git a/src/backend/base/langflow/services/variable/kubernetes.py b/src/backend/base/langflow/services/variable/kubernetes.py index 9d276ffce2f1..47400d82c964 100644 --- a/src/backend/base/langflow/services/variable/kubernetes.py +++ b/src/backend/base/langflow/services/variable/kubernetes.py @@ -55,7 +55,8 @@ def resolve_variable( ) -> tuple[str, str]: variables = self.kubernetes_secrets.get_secret(name=secret_name) if not variables: - raise ValueError(f"user_id {user_id} variable not found.") + msg = f"user_id {user_id} variable not found." + raise ValueError(msg) if name in variables: return name, variables[name] @@ -64,7 +65,8 @@ def resolve_variable( if credential_name in variables: return credential_name, variables[credential_name] else: - raise ValueError(f"user_id {user_id} variable name {name} not found.") + msg = f"user_id {user_id} variable name {name} not found." 
+ raise ValueError(msg) def get_variable( self, @@ -76,10 +78,11 @@ def get_variable( secret_name = encode_user_id(user_id) key, value = self.resolve_variable(secret_name, user_id, name) if key.startswith(CREDENTIAL_TYPE + "_") and field == "session_id": # type: ignore - raise TypeError( + msg = ( f"variable {name} of type 'Credential' cannot be used in a Session ID field " "because its purpose is to prevent the exposure of values." ) + raise TypeError(msg) return value def list_variables( diff --git a/src/backend/base/langflow/services/variable/kubernetes_secrets.py b/src/backend/base/langflow/services/variable/kubernetes_secrets.py index 09a4fd40a745..6cb23b56119d 100644 --- a/src/backend/base/langflow/services/variable/kubernetes_secrets.py +++ b/src/backend/base/langflow/services/variable/kubernetes_secrets.py @@ -185,7 +185,8 @@ def encode_user_id(user_id: UUID | str) -> str: id = id[:253] if not all(c.isalnum() or c in "-_" for c in id): - raise ValueError(f"Invalid user_id: {id}") + msg = f"Invalid user_id: {id}" + raise ValueError(msg) # Ensure the name ends with an alphanumeric character while not id[-1].isalnum(): diff --git a/src/backend/base/langflow/services/variable/service.py b/src/backend/base/langflow/services/variable/service.py index b816a883b77c..b3eeb43ecd68 100644 --- a/src/backend/base/langflow/services/variable/service.py +++ b/src/backend/base/langflow/services/variable/service.py @@ -76,13 +76,15 @@ def get_variable( variable = session.exec(select(Variable).where(Variable.user_id == user_id, Variable.name == name)).first() if not variable or not variable.value: - raise ValueError(f"{name} variable not found.") + msg = f"{name} variable not found." + raise ValueError(msg) if variable.type == CREDENTIAL_TYPE and field == "session_id": # type: ignore - raise TypeError( + msg = ( f"variable {name} of type 'Credential' cannot be used in a Session ID field " "because its purpose is to prevent the exposure of values." ) + raise TypeError(msg) # we decrypt the value decrypted = auth_utils.decrypt_api_key(variable.value, settings_service=self.settings_service) @@ -104,7 +106,8 @@ def update_variable( ): variable = session.exec(select(Variable).where(Variable.user_id == user_id, Variable.name == name)).first() if not variable: - raise ValueError(f"{name} variable not found.") + msg = f"{name} variable not found." + raise ValueError(msg) encrypted = auth_utils.encrypt_api_key(value, settings_service=self.settings_service) variable.value = encrypted session.add(variable) @@ -143,14 +146,16 @@ def delete_variable( stmt = select(Variable).where(Variable.user_id == user_id).where(Variable.name == name) variable = session.exec(stmt).first() if not variable: - raise ValueError(f"{name} variable not found.") + msg = f"{name} variable not found." + raise ValueError(msg) session.delete(variable) session.commit() def delete_variable_by_id(self, user_id: UUID | str, variable_id: UUID, session: Session): variable = session.exec(select(Variable).where(Variable.user_id == user_id, Variable.id == variable_id)).first() if not variable: - raise ValueError(f"{variable_id} variable not found.") + msg = f"{variable_id} variable not found." 
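
For longer messages, as in the variable-service hunks above, the string is first assembled with implicit concatenation inside parentheses and then the variable is raised. The function below is an illustrative reduction, not the service's real signature:

    CREDENTIAL_TYPE = "Credential"

    def check_session_field(name: str, var_type: str, field: str) -> None:
        if var_type == CREDENTIAL_TYPE and field == "session_id":
            msg = (
                f"variable {name} of type 'Credential' cannot be used in a Session ID field "
                "because its purpose is to prevent the exposure of values."
            )
            raise TypeError(msg)

    check_session_field("OPENAI_API_KEY", "Generic", "session_id")  # no error raised
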
+ raise ValueError(msg) session.delete(variable) session.commit() diff --git a/src/backend/base/langflow/template/field/base.py b/src/backend/base/langflow/template/field/base.py index c767151f9ff1..904524b8a844 100644 --- a/src/backend/base/langflow/template/field/base.py +++ b/src/backend/base/langflow/template/field/base.py @@ -143,7 +143,8 @@ def serialize_display_name(self, value, _info): @field_validator("file_types") def validate_file_types(cls, value): if not isinstance(value, list): - raise ValueError("file_types must be a list") + msg = "file_types must be a list" + raise ValueError(msg) return [ (f".{file_type}" if isinstance(file_type, str) and not file_type.startswith(".") else file_type) for file_type in value @@ -159,7 +160,8 @@ def validate_type(cls, v): v = post_process_type(v)[0] v = format_type(v) elif not isinstance(v, str): - raise ValueError(f"type must be a string or a type, not {type(v)}") + msg = f"type must be a string or a type, not {type(v)}" + raise ValueError(msg) return v @@ -212,7 +214,8 @@ def validate_model(self): if self.value == UNDEFINED.value: self.value = UNDEFINED if self.name is None: - raise ValueError("name must be set") + msg = "name must be set" + raise ValueError(msg) if self.display_name is None: self.display_name = self.name return self diff --git a/src/backend/base/langflow/template/frontend_node/base.py b/src/backend/base/langflow/template/frontend_node/base.py index 03df48b9d03b..75f869c47f16 100644 --- a/src/backend/base/langflow/template/frontend_node/base.py +++ b/src/backend/base/langflow/template/frontend_node/base.py @@ -124,9 +124,8 @@ def validate_name_overlap(self) -> None: overlap = set(output_names).intersection(input_names) if overlap: overlap_str = ", ".join(f"'{x}'" for x in overlap) - raise ValueError( - f"There should be no overlap between input and output names. Names {overlap_str} are duplicated." - ) + msg = f"There should be no overlap between input and output names. Names {overlap_str} are duplicated." + raise ValueError(msg) def validate_attributes(self) -> None: # None of inputs, outputs, _artifacts, _results, logs, status, vertex, graph, display_name, description, @@ -175,7 +174,8 @@ def add_output_type(self, output_type: str | list[str]) -> None: def from_inputs(cls, **kwargs): """Create a frontend node from inputs.""" if "inputs" not in kwargs: - raise ValueError("Missing 'inputs' argument.") + msg = "Missing 'inputs' argument." 
+ raise ValueError(msg) if "_outputs_map" in kwargs: kwargs["outputs"] = kwargs.pop("_outputs_map") inputs = kwargs.pop("inputs") diff --git a/src/backend/base/langflow/template/template/base.py b/src/backend/base/langflow/template/template/base.py index c4f5966d8163..dab6aa94fcd6 100644 --- a/src/backend/base/langflow/template/template/base.py +++ b/src/backend/base/langflow/template/template/base.py @@ -51,7 +51,8 @@ def from_dict(cls, data: dict) -> "Template": try: _input = instantiate_input(input_type, value) except Exception as e: - raise ValueError(f"Error instantiating input {input_type}: {e}") + msg = f"Error instantiating input {input_type}: {e}" + raise ValueError(msg) else: _input = Input(**value) @@ -76,7 +77,8 @@ def get_field(self, field_name: str) -> Input: """Returns the field with the given name.""" field = next((field for field in self.fields if field.name == field_name), None) if field is None: - raise ValueError(f"Field {field_name} not found in template {self.type_name}") + msg = f"Field {field_name} not found in template {self.type_name}" + raise ValueError(msg) return cast(Input, field) def update_field(self, field_name: str, field: Input) -> None: @@ -85,7 +87,8 @@ def update_field(self, field_name: str, field: Input) -> None: if template_field.name == field_name: self.fields[idx] = field return - raise ValueError(f"Field {field_name} not found in template {self.type_name}") + msg = f"Field {field_name} not found in template {self.type_name}" + raise ValueError(msg) def upsert_field(self, field_name: str, field: Input) -> None: """Updates the field with the given name or adds it if it doesn't exist.""" diff --git a/src/backend/base/langflow/utils/concurrency.py b/src/backend/base/langflow/utils/concurrency.py index a89a4988aacf..e22e958c8099 100644 --- a/src/backend/base/langflow/utils/concurrency.py +++ b/src/backend/base/langflow/utils/concurrency.py @@ -56,7 +56,8 @@ def _validate_key(self, key: str) -> bool: @contextmanager def lock(self, key: str): if not self._validate_key(key): - raise ValueError(f"Invalid key: {key}") + msg = f"Invalid key: {key}" + raise ValueError(msg) lock = FileLock(self.locks_dir / key) with lock: diff --git a/src/backend/base/langflow/utils/payload.py b/src/backend/base/langflow/utils/payload.py index 8574e4db9974..4a639690cbdf 100644 --- a/src/backend/base/langflow/utils/payload.py +++ b/src/backend/base/langflow/utils/payload.py @@ -77,7 +77,8 @@ def build_json(root, graph) -> dict: children.extend(node_children) if value["required"] and not children: - raise ValueError(f"No child with type {node_type} found") + msg = f"No child with type {node_type} found" + raise ValueError(msg) values = [build_json(child, graph) for child in children] value = ( list(values) if value["list"] else next(iter(values), None) # type: ignore diff --git a/src/backend/base/langflow/utils/schemas.py b/src/backend/base/langflow/utils/schemas.py index 83345550236d..d14c0a5f5ada 100644 --- a/src/backend/base/langflow/utils/schemas.py +++ b/src/backend/base/langflow/utils/schemas.py @@ -36,14 +36,16 @@ def validate_files(cls, files): for file in files: if not isinstance(file, dict): - raise ValueError("Files must be a list of dictionaries.") + msg = "Files must be a list of dictionaries." 
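
When the caught exception's text is folded into the message, as in the Template.from_dict hunk above, the interpolation simply happens at the assignment. The sketch below also adds explicit `from e` chaining, which is included here only for the illustration and is not something this patch changes:

    def instantiate_input(input_type: str, value: dict) -> dict:
        try:
            return {"type": input_type, **value}
        except TypeError as e:
            msg = f"Error instantiating input {input_type}: {e}"
            raise ValueError(msg) from e  # chaining added for the sketch

    print(instantiate_input("StrInput", {"name": "greeting"}))
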
+ raise ValueError(msg) if not all(key in file for key in ["path", "name", "type"]): # If any of the keys are missing, we should extract the # values from the file path path = file.get("path") if not path: - raise ValueError("File path is required.") + msg = "File path is required." + raise ValueError(msg) name = file.get("name") if not name: @@ -62,7 +64,8 @@ def validate_files(cls, files): _type = file_type break if not _type: - raise ValueError("File type is required.") + msg = "File type is required." + raise ValueError(msg) file["type"] = _type return files diff --git a/src/backend/base/langflow/utils/util.py b/src/backend/base/langflow/utils/util.py index c343859b7345..70cb7946529e 100644 --- a/src/backend/base/langflow/utils/util.py +++ b/src/backend/base/langflow/utils/util.py @@ -30,7 +30,8 @@ def build_template_from_function(name: str, type_to_loader_dict: dict, add_funct # Raise error if name is not in chains if name not in classes: - raise ValueError(f"{name} not found") + msg = f"{name} not found" + raise ValueError(msg) for _type, v in type_to_loader_dict.items(): if v.__annotations__["return"].__name__ == name: @@ -81,7 +82,8 @@ def build_template_from_method( # Raise error if class_name is not in classes if class_name not in classes: - raise ValueError(f"{class_name} not found.") + msg = f"{class_name} not found." + raise ValueError(msg) for _type, v in type_to_cls_dict.items(): if v.__name__ == class_name: @@ -89,7 +91,8 @@ def build_template_from_method( # Check if the method exists in this class if not hasattr(_class, method_name): - raise ValueError(f"Method {method_name} not found in class {class_name}") + msg = f"Method {method_name} not found in class {class_name}" + raise ValueError(msg) # Get the method method = getattr(_class, method_name) diff --git a/src/backend/base/langflow/utils/validate.py b/src/backend/base/langflow/utils/validate.py index 0ebfab2a1b97..75cf1b6f1747 100644 --- a/src/backend/base/langflow/utils/validate.py +++ b/src/backend/base/langflow/utils/validate.py @@ -70,7 +70,8 @@ def eval_function(function_string: str): None, ) if function_object is None: - raise ValueError("Function string does not contain a function") + msg = "Function string does not contain a function" + raise ValueError(msg) return function_object @@ -91,7 +92,8 @@ def execute_function(code, function_name, *args, **kwargs): ) exec_globals[alias.asname or alias.name] = importlib.import_module(alias.name) except ModuleNotFoundError as e: - raise ModuleNotFoundError(f"Module {alias.name} not found. Please install it and try again.") from e + msg = f"Module {alias.name} not found. Please install it and try again." + raise ModuleNotFoundError(msg) from e function_code = next( node for node in module.body if isinstance(node, ast.FunctionDef) and node.name == function_name @@ -101,7 +103,8 @@ def execute_function(code, function_name, *args, **kwargs): try: exec(code_obj, exec_globals, locals()) except Exception as exc: - raise ValueError("Function string does not contain a function") from exc + msg = "Function string does not contain a function" + raise ValueError(msg) from exc # Add the function to the exec_globals dictionary exec_globals[function_name] = locals()[function_name] @@ -126,7 +129,8 @@ class TypeIgnore(ast.AST): try: exec_globals[alias.asname or alias.name] = importlib.import_module(alias.name) except ModuleNotFoundError as e: - raise ModuleNotFoundError(f"Module {alias.name} not found. Please install it and try again.") from e + msg = f"Module {alias.name} not found. 
Please install it and try again." + raise ModuleNotFoundError(msg) from e function_code = next( node for node in module.body if isinstance(node, ast.FunctionDef) and node.name == function_name @@ -206,14 +210,16 @@ def prepare_global_scope(code, module): try: exec_globals[alias.asname or alias.name] = importlib.import_module(alias.name) except ModuleNotFoundError as e: - raise ModuleNotFoundError(f"Module {alias.name} not found. Please install it and try again.") from e + msg = f"Module {alias.name} not found. Please install it and try again." + raise ModuleNotFoundError(msg) from e elif isinstance(node, ast.ImportFrom) and node.module is not None: try: imported_module = importlib.import_module(node.module) for alias in node.names: exec_globals[alias.name] = getattr(imported_module, alias.name) except ModuleNotFoundError: - raise ModuleNotFoundError(f"Module {node.module} not found. Please install it and try again") + msg = f"Module {node.module} not found. Please install it and try again" + raise ModuleNotFoundError(msg) return exec_globals @@ -305,7 +311,8 @@ def extract_function_name(code): for node in module.body: if isinstance(node, ast.FunctionDef): return node.name - raise ValueError("No function definition found in the code string") + msg = "No function definition found in the code string" + raise ValueError(msg) def extract_class_name(code): @@ -313,4 +320,5 @@ def extract_class_name(code): for node in module.body: if isinstance(node, ast.ClassDef): return node.name - raise ValueError("No class definition found in the code string") + msg = "No class definition found in the code string" + raise ValueError(msg) diff --git a/src/backend/base/langflow/utils/version.py b/src/backend/base/langflow/utils/version.py index bc167f2f2d33..28ff54479f91 100644 --- a/src/backend/base/langflow/utils/version.py +++ b/src/backend/base/langflow/utils/version.py @@ -46,7 +46,8 @@ def _get_version_info(): pass if __version__ is None: - raise ValueError(f"Package not found from options {package_options}") + msg = f"Package not found from options {package_options}" + raise ValueError(msg) VERSION_INFO = _get_version_info() diff --git a/src/backend/base/langflow/worker.py b/src/backend/base/langflow/worker.py index 095b1e99a236..46a0a665b6c1 100644 --- a/src/backend/base/langflow/worker.py +++ b/src/backend/base/langflow/worker.py @@ -34,4 +34,5 @@ def process_graph_cached_task( clear_cache=False, session_id=None, ) -> dict[str, Any]: - raise NotImplementedError("This task is not implemented yet") + msg = "This task is not implemented yet" + raise NotImplementedError(msg) diff --git a/src/backend/base/pyproject.toml b/src/backend/base/pyproject.toml index 939d80f038e9..ce13743023d1 100644 --- a/src/backend/base/pyproject.toml +++ b/src/backend/base/pyproject.toml @@ -155,6 +155,7 @@ select = [ "COM", "DJ", "E", + "EM", "F", "FLY", "FURB",