From 46fec804602f60770da9a8468c690a05d3724d66 Mon Sep 17 00:00:00 2001
From: Gabriel Luiz Freitas Almeida
Date: Mon, 5 Aug 2024 16:00:05 -0300
Subject: [PATCH] Refactor: Add pytest fixtures for memory_chatbot_graph tests and improve test structure

---
 .../starter_projects/test_memory_chatbot.py | 90 ++++++++++++++++++-
 1 file changed, 88 insertions(+), 2 deletions(-)

diff --git a/src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py b/src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py
index 3fd135de288b..3957c2c7c27d 100644
--- a/src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py
+++ b/src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py
@@ -1,5 +1,7 @@
 from collections import deque
 
+import pytest
+
 from langflow.components.helpers.Memory import MemoryComponent
 from langflow.components.inputs.ChatInput import ChatInput
 from langflow.components.models.OpenAIModel import OpenAIModelComponent
@@ -7,9 +9,16 @@
 from langflow.components.prompts.Prompt import PromptComponent
 from langflow.graph import Graph
 from langflow.graph.graph.constants import Finish
+from langflow.graph.graph.schema import GraphDump
+
+
+@pytest.fixture
+def client():
+    pass
 
 
-def test_memory_chatbot():
+@pytest.fixture
+def memory_chatbot_graph():
     session_id = "test_session_id"
     template = """{context}
 
@@ -32,10 +41,87 @@
     chat_output.set(input_value=openai_component.text_response)
 
     graph = Graph(chat_input, chat_output)
+    return graph
+
+
+def test_memory_chatbot(memory_chatbot_graph):
     # Now we run step by step
     expected_order = deque(["chat_input", "chat_memory", "prompt", "openai", "chat_output"])
     for step in expected_order:
-        result = graph.step()
+        result = memory_chatbot_graph.step()
         if isinstance(result, Finish):
             break
 
         assert step == result.vertex.id
+
+
+def test_memory_chatbot_dump_structure(memory_chatbot_graph: Graph):
+    # Now we run step by step
+    graph_dict = memory_chatbot_graph.dump(
+        name="Memory Chatbot", description="A memory chatbot", endpoint_name="membot"
+    )
+    assert isinstance(graph_dict, dict)
+    # Test structure
+    assert "data" in graph_dict
+    assert "is_component" in graph_dict
+
+    data_dict = graph_dict["data"]
+    assert "nodes" in data_dict
+    assert "edges" in data_dict
+    assert "description" in graph_dict
+    assert "endpoint_name" in graph_dict
+
+    # Test data
+    nodes = data_dict["nodes"]
+    edges = data_dict["edges"]
+    description = graph_dict["description"]
+    endpoint_name = graph_dict["endpoint_name"]
+
+    assert len(nodes) == 5
+    assert len(edges) == 4
+    assert description is not None
+    assert endpoint_name is not None
+
+
+def test_memory_chatbot_dump_components_and_edges(memory_chatbot_graph: Graph):
+    # Check all components and edges were dumped correctly
+    graph_dict: GraphDump = memory_chatbot_graph.dump(
+        name="Memory Chatbot", description="A memory chatbot", endpoint_name="membot"
+    )
+
+    data_dict = graph_dict["data"]
+    nodes = data_dict["nodes"]
+    edges = data_dict["edges"]
+
+    # sort the nodes by id
+    nodes = sorted(nodes, key=lambda x: x["id"])
+
+    # Check each node
+    assert nodes[0]["data"]["type"] == "ChatInput"
+    assert nodes[0]["id"] == "chat_input"
+
+    assert nodes[1]["data"]["type"] == "MemoryComponent"
+    assert nodes[1]["id"] == "chat_memory"
+
+    assert nodes[2]["data"]["type"] == "ChatOutput"
+    assert nodes[2]["id"] == "chat_output"
+
+    assert nodes[3]["data"]["type"] == "OpenAIModelComponent"
+    assert nodes[3]["id"] == "openai"
+
+    assert nodes[4]["data"]["type"] == "PromptComponent"
+    assert nodes[4]["id"] == "prompt"
+
+    # Check edges
+    expected_edges = [
+        ("chat_input", "prompt"),
+        ("chat_memory", "prompt"),
+        ("prompt", "openai"),
+        ("openai", "chat_output"),
+    ]
+
+    assert len(edges) == len(expected_edges)
+
+    for edge in edges:
+        source = edge["source"]
+        target = edge["target"]
+        assert (source, target) in expected_edges, edge