From bf7cdd28b04ff04743564523f8897cad220bb766 Mon Sep 17 00:00:00 2001
From: Christophe Bornet
Date: Tue, 1 Oct 2024 13:35:08 +0200
Subject: [PATCH] Add various ruff rules

---
 src/backend/base/langflow/__main__.py         |  2 +-
 .../base/langflow/base/agents/agent.py        |  2 --
 src/backend/base/langflow/base/data/utils.py  |  2 +-
 .../base/document_transformers/model.py       |  2 --
 .../base/langchain_utilities/model.py         |  2 --
 .../base/langflow/base/textsplitters/model.py |  1 -
 .../langflow/components/chains/RetrievalQA.py |  2 +-
 .../AssemblyAIStartTranscript.py              |  2 +-
 .../custom/code_parser/code_parser.py         |  1 -
 src/backend/base/langflow/custom/utils.py     |  2 +-
 .../base/langflow/inputs/input_mixin.py       |  2 +-
 .../base/langflow/services/storage/local.py   | 18 +++++++----
 .../base/langflow/services/storage/s3.py      |  1 -
 .../base/langflow/services/store/service.py   |  8 ++---
 src/backend/base/pyproject.toml               | 30 ++++++++++++++++++-
 15 files changed, 52 insertions(+), 25 deletions(-)

diff --git a/src/backend/base/langflow/__main__.py b/src/backend/base/langflow/__main__.py
index 695e5b95c0c6..8862466122f6 100644
--- a/src/backend/base/langflow/__main__.py
+++ b/src/backend/base/langflow/__main__.py
@@ -454,7 +454,7 @@ def migration(
         if not typer.confirm(
             "This will delete all data necessary to fix migrations. Are you sure you want to continue?"
         ):
-            raise typer.Abort()
+            raise typer.Abort
 
     initialize_services(fix_migration=fix)
     db_service = get_db_service()
diff --git a/src/backend/base/langflow/base/agents/agent.py b/src/backend/base/langflow/base/agents/agent.py
index 0402f96b5d56..554e7241389d 100644
--- a/src/backend/base/langflow/base/agents/agent.py
+++ b/src/backend/base/langflow/base/agents/agent.py
@@ -50,7 +50,6 @@ class LCAgentComponent(Component):
     @abstractmethod
     def build_agent(self) -> AgentExecutor:
         """Create the agent."""
-        pass
 
     async def message_response(self) -> Message:
         """Run the agent and return the response."""
@@ -156,4 +155,3 @@ async def run_agent(
     @abstractmethod
     def create_agent_runnable(self) -> Runnable:
         """Create the agent."""
-        pass
diff --git a/src/backend/base/langflow/base/data/utils.py b/src/backend/base/langflow/base/data/utils.py
index 612bee05075a..7553d047cb52 100644
--- a/src/backend/base/langflow/base/data/utils.py
+++ b/src/backend/base/langflow/base/data/utils.py
@@ -140,7 +140,7 @@ def parse_text_file_to_data(file_path: str, silent_errors: bool) -> Data | None:
             text = [normalize_text(item) if isinstance(item, str) else item for item in text]
         text = orjson.dumps(text).decode("utf-8")
 
-    elif file_path.endswith(".yaml") or file_path.endswith(".yml"):
+    elif file_path.endswith((".yaml", ".yml")):
         text = yaml.safe_load(text)
     elif file_path.endswith(".xml"):
         xml_element = ET.fromstring(text)
diff --git a/src/backend/base/langflow/base/document_transformers/model.py b/src/backend/base/langflow/base/document_transformers/model.py
index d698f7e916a8..41f659067c0a 100644
--- a/src/backend/base/langflow/base/document_transformers/model.py
+++ b/src/backend/base/langflow/base/document_transformers/model.py
@@ -39,11 +39,9 @@ def get_data_input(self) -> Any:
         """
         Get the data input.
         """
-        pass
 
     @abstractmethod
     def build_document_transformer(self) -> BaseDocumentTransformer:
         """
         Build the text splitter.
""" - pass diff --git a/src/backend/base/langflow/base/langchain_utilities/model.py b/src/backend/base/langflow/base/langchain_utilities/model.py index 49772b2704f7..7e41a6a0e724 100644 --- a/src/backend/base/langflow/base/langchain_utilities/model.py +++ b/src/backend/base/langflow/base/langchain_utilities/model.py @@ -28,11 +28,9 @@ def run_model(self) -> Data | list[Data]: """ Run model and return the output. """ - pass @abstractmethod def build_tool(self) -> Tool | Sequence[Tool]: """ Build the tool. """ - pass diff --git a/src/backend/base/langflow/base/textsplitters/model.py b/src/backend/base/langflow/base/textsplitters/model.py index 413d73d6fc50..058e21f426ce 100644 --- a/src/backend/base/langflow/base/textsplitters/model.py +++ b/src/backend/base/langflow/base/textsplitters/model.py @@ -26,4 +26,3 @@ def build_text_splitter(self) -> TextSplitter: """ Build the text splitter. """ - pass diff --git a/src/backend/base/langflow/components/chains/RetrievalQA.py b/src/backend/base/langflow/components/chains/RetrievalQA.py index c6ef848ec0c1..9c656ecb3107 100644 --- a/src/backend/base/langflow/components/chains/RetrievalQA.py +++ b/src/backend/base/langflow/components/chains/RetrievalQA.py @@ -58,7 +58,7 @@ def invoke_chain(self) -> Message: result_str = str(result.get("result", "")) if self.return_source_documents and len(source_docs): references_str = self.create_references_from_data(source_docs) - result_str = "\n".join([result_str, references_str]) + result_str = f"{result_str}\n{references_str}" # put the entire result to debug history, query and content self.status = {**result, "source_documents": source_docs, "output": result_str} return result_str diff --git a/src/backend/base/langflow/components/documentloaders/AssemblyAIStartTranscript.py b/src/backend/base/langflow/components/documentloaders/AssemblyAIStartTranscript.py index ba1d4abd2b1b..626fc2f12b4e 100644 --- a/src/backend/base/langflow/components/documentloaders/AssemblyAIStartTranscript.py +++ b/src/backend/base/langflow/components/documentloaders/AssemblyAIStartTranscript.py @@ -91,7 +91,7 @@ class AssemblyAITranscriptionJobCreator(Component): name="language_code", display_name="Language", info=""" - The language of the audio file. Can be set manually if automatic language detection is disabled. + The language of the audio file. Can be set manually if automatic language detection is disabled. See https://www.assemblyai.com/docs/getting-started/supported-languages for a list of supported language codes. 
""", ), diff --git a/src/backend/base/langflow/custom/code_parser/code_parser.py b/src/backend/base/langflow/custom/code_parser/code_parser.py index 9ceea82a34b7..66f29d6b7239 100644 --- a/src/backend/base/langflow/custom/code_parser/code_parser.py +++ b/src/backend/base/langflow/custom/code_parser/code_parser.py @@ -343,7 +343,6 @@ def parse_classes(self, node: ast.ClassDef) -> None: nodes.append(class_node) except Exception as exc: logger.error(f"Error finding base class node: {exc}") - pass nodes.insert(0, node) class_details = ClassCodeDetails( name=node.name, diff --git a/src/backend/base/langflow/custom/utils.py b/src/backend/base/langflow/custom/utils.py index 10ee795c29a9..31f825a832e7 100644 --- a/src/backend/base/langflow/custom/utils.py +++ b/src/backend/base/langflow/custom/utils.py @@ -128,7 +128,7 @@ def get_field_properties(extra_field): def process_type(field_type: str): - if field_type.startswith("list") or field_type.startswith("List"): + if field_type.startswith(("list", "List")): return extract_inner_type(field_type) # field_type is a string can be Prompt or Code too diff --git a/src/backend/base/langflow/inputs/input_mixin.py b/src/backend/base/langflow/inputs/input_mixin.py index 054a3de5a924..0e095d40f2a7 100644 --- a/src/backend/base/langflow/inputs/input_mixin.py +++ b/src/backend/base/langflow/inputs/input_mixin.py @@ -18,7 +18,7 @@ class FieldTypes(str, Enum): TEXT = "str" INTEGER = "int" - PASSWORD = "str" + PASSWORD = "str" # noqa: PIE796 FLOAT = "float" BOOLEAN = "bool" DICT = "dict" diff --git a/src/backend/base/langflow/services/storage/local.py b/src/backend/base/langflow/services/storage/local.py index 1c3a67daf635..1044a76fe278 100644 --- a/src/backend/base/langflow/services/storage/local.py +++ b/src/backend/base/langflow/services/storage/local.py @@ -1,3 +1,4 @@ +import asyncio from pathlib import Path from loguru import logger @@ -33,9 +34,12 @@ async def save_file(self, flow_id: str, file_name: str, data: bytes): folder_path.mkdir(parents=True, exist_ok=True) file_path = folder_path / file_name - try: + def write_file(file_path: Path, data: bytes) -> None: with open(file_path, "wb") as f: f.write(data) + + try: + await asyncio.get_event_loop().run_in_executor(None, write_file, file_path, data) logger.info(f"File {file_name} saved successfully in flow {flow_id}.") except Exception as e: logger.error(f"Error saving file {file_name} in flow {flow_id}: {e}") @@ -55,9 +59,13 @@ async def get_file(self, flow_id: str, file_name: str) -> bytes: logger.warning(f"File {file_name} not found in flow {flow_id}.") raise FileNotFoundError(f"File {file_name} not found in flow {flow_id}") - with open(file_path, "rb") as f: - logger.debug(f"File {file_name} retrieved successfully from flow {flow_id}.") - return f.read() + def read_file(file_path: Path) -> bytes: + with open(file_path, "rb") as f: + return f.read() + + content = await asyncio.get_event_loop().run_in_executor(None, read_file, file_path) + logger.debug(f"File {file_name} retrieved successfully from flow {flow_id}.") + return content async def list_files(self, flow_id: str): """ @@ -92,4 +100,4 @@ async def delete_file(self, flow_id: str, file_name: str): async def teardown(self): """Perform any cleanup operations when the service is being torn down.""" - pass # No specific teardown actions required for local + # No specific teardown actions required for local diff --git a/src/backend/base/langflow/services/storage/s3.py b/src/backend/base/langflow/services/storage/s3.py index 4426f377c74a..7292b7a0be9c 
--- a/src/backend/base/langflow/services/storage/s3.py
+++ b/src/backend/base/langflow/services/storage/s3.py
@@ -86,4 +86,3 @@ async def delete_file(self, folder: str, file_name: str):
     async def teardown(self):
         """Perform any cleanup operations when the service is being torn down."""
         # No specific teardown actions required for S3 storage at the moment.
-        pass
diff --git a/src/backend/base/langflow/services/store/service.py b/src/backend/base/langflow/services/store/service.py
index e827b28a984c..0d9a3283632c 100644
--- a/src/backend/base/langflow/services/store/service.py
+++ b/src/backend/base/langflow/services/store/service.py
@@ -330,7 +330,7 @@ async def get_components_in_users_collection(self, component_ids: list[str], api
 
     async def download(self, api_key: str, component_id: UUID) -> DownloadComponentResponse:
         url = f"{self.components_url}/{component_id}"
-        params = {"fields": ",".join(["id", "name", "description", "data", "is_component", "metadata"])}
+        params = {"fields": "id,name,description,data,is_component,metadata"}
         if not self.download_webhook_url:
             raise ValueError("DOWNLOAD_WEBHOOK_URL is not set")
         component, _ = await self._get(url, api_key, params)
@@ -420,14 +420,14 @@ async def update(
 
     async def get_tags(self) -> list[dict[str, Any]]:
         url = f"{self.base_url}/items/tags"
-        params = {"fields": ",".join(["id", "name"])}
+        params = {"fields": "id,name"}
         tags, _ = await self._get(url, api_key=None, params=params)
         return tags
 
     async def get_user_likes(self, api_key: str) -> list[dict[str, Any]]:
         url = f"{self.base_url}/users/me"
         params = {
-            "fields": ",".join(["id", "likes"]),
+            "fields": "id,likes",
         }
         likes, _ = await self._get(url, api_key, params)
         return likes
@@ -436,7 +436,7 @@ async def get_component_likes_count(self, component_id: str, api_key: str | None
         url = f"{self.components_url}/{component_id}"
 
         params = {
-            "fields": ",".join(["id", "count(liked_by)"]),
+            "fields": "id,count(liked_by)",
         }
         result, _ = await self._get(url, api_key=api_key, params=params)
         if len(result) == 0:
diff --git a/src/backend/base/pyproject.toml b/src/backend/base/pyproject.toml
index 95c57fc142a5..062a299e2d38 100644
--- a/src/backend/base/pyproject.toml
+++ b/src/backend/base/pyproject.toml
@@ -148,7 +148,35 @@ exclude = ["langflow/alembic"]
 line-length = 120
 
 [tool.ruff.lint]
-select = ["C4", "E4", "E7", "E9", "F", "I", "UP"]
+select = [
+    "ASYNC",
+    "C4",
+    "COM",
+    "DJ",
+    "E4", "E7", "E9",
+    "F",
+    "FLY",
+    "FURB",
+    "I",
+    "ICN",
+    "INT",
+    "LOG",
+    "NPY",
+    "PD",
+    "PIE",
+    "Q",
+    "RSE",
+    "SLOT",
+    "T10",
+    "TID",
+    "UP",
+    "W",
+    "YTT"
+]
+ignore = [
+    "COM812", # Messes with the formatter
+]
+
 
 [build-system]
 requires = ["hatchling"]