Skip to content

Commit

Permalink
chore: refactor and add components integration tests (#3607)
Browse files Browse the repository at this point in the history
* improve integration tests

* add fixes

* [autofix.ci] apply automated fixes

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
  • Loading branch information
nicoloboschi and autofix-ci[bot] authored Sep 2, 2024
1 parent 4ee2535 commit 96872f3
Show file tree
Hide file tree
Showing 32 changed files with 528 additions and 136 deletions.
24 changes: 22 additions & 2 deletions .github/workflows/python_test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,6 @@ jobs:
python-version: ${{ fromJson(inputs.python-versions || '["3.10", "3.11", "3.12"]' ) }}
splitCount: [5]
group: [1, 2, 3, 4, 5]
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
steps:
- uses: actions/checkout@v4
with:
Expand All @@ -59,6 +57,28 @@ jobs:
timeout_minutes: 12
max_attempts: 2
command: make unit_tests async=false args="--splits ${{ matrix.splitCount }} --group ${{ matrix.group }}"
integration-tests:
name: Integration Tests - Python ${{ matrix.python-version }}
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ${{ fromJson(inputs.python-versions || '["3.10", "3.11", "3.12"]' ) }}
steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.branch || github.ref }}
- name: Set up Python ${{ matrix.python-version }} + Poetry ${{ env.POETRY_VERSION }}
uses: "./.github/actions/poetry_caching"
with:
python-version: ${{ matrix.python-version }}
poetry-version: ${{ env.POETRY_VERSION }}
cache-key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ hashFiles('**/poetry.lock') }}
- name: Install Python dependencies
run: |
poetry env use ${{ matrix.python-version }}
poetry install
- name: Run integration tests
run: make integration_tests_no_api_keys

test-cli:
name: Test CLI - Python ${{ matrix.python-version }}
Expand Down
6 changes: 3 additions & 3 deletions .github/workflows/scheduled_integration_test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ jobs:
run: |
poetry env use ${{ matrix.python-version }}
poetry install
- name: Run integration tests
timeout-minutes: 12
- name: Run integration tests with api keys
timeout-minutes: 20
run: |
make integration_tests
make integration_tests_api_keys
12 changes: 11 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -148,11 +148,21 @@ else
$(args)
endif

integration_tests: ## run integration tests
integration_tests:
poetry run pytest src/backend/tests/integration \
--instafail -ra \
$(args)

integration_tests_no_api_keys:
poetry run pytest src/backend/tests/integration \
--instafail -ra -m "not api_key_required" \
$(args)

integration_tests_api_keys:
poetry run pytest src/backend/tests/integration \
--instafail -ra -m "api_key_required" \
$(args)

tests: ## run unit, integration, coverage tests
@echo 'Running Unit Tests...'
make unit_tests
Expand Down
23 changes: 21 additions & 2 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,7 @@ astra-assistants = "^2.1.0.10"
composio-langchain = "^0.5.8"
spider-client = "^0.0.27"
nltk = "^3.9.1"
bson = "^0.5.10"


[tool.poetry.group.dev.dependencies]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -263,7 +263,7 @@ def map_inputs(self, inputs: list["InputTypes"]):
for input_ in inputs:
if input_.name is None:
raise ValueError("Input name cannot be None.")
self._inputs[input_.name] = input_
self._inputs[input_.name] = deepcopy(input_)

def validate(self, params: dict):
"""
Expand Down Expand Up @@ -496,6 +496,8 @@ def to_frontend_node(self):
#! works and then update this later
field_config = self.get_template_config(self)
frontend_node = ComponentFrontendNode.from_inputs(**field_config)
for key, value in self._inputs.items():
frontend_node.set_field_load_from_db_in_template(key, False)
self._map_parameters_on_frontend_node(frontend_node)

frontend_node_dict = frontend_node.to_dict(keep_name=False)
Expand Down Expand Up @@ -532,7 +534,9 @@ def to_frontend_node(self):
"data": {
"node": frontend_node.to_dict(keep_name=False),
"type": self.name or self.__class__.__name__,
}
"id": self._id,
},
"id": self._id,
}
return data

Expand Down
44 changes: 30 additions & 14 deletions src/backend/base/langflow/graph/graph/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,33 +202,35 @@ def add_nodes_and_edges(self, nodes: list[NodeData], edges: list[EdgeData]):
self._edges = self._graph_data["edges"]
self.initialize()

def add_component(self, _id: str, component: "Component"):
if _id in self.vertex_map:
return
def add_component(self, component: "Component", component_id: Optional[str] = None) -> str:
component_id = component_id or component._id
if component_id in self.vertex_map:
return component_id
component._id = component_id
if component_id in self.vertex_map:
raise ValueError(f"Component ID {component_id} already exists")
frontend_node = component.to_frontend_node()
frontend_node["data"]["id"] = _id
frontend_node["id"] = _id
self._vertices.append(frontend_node)
vertex = self._create_vertex(frontend_node)
vertex.add_component_instance(component)
self.vertices.append(vertex)
self.vertex_map[_id] = vertex

self._add_vertex(vertex)
if component._edges:
for edge in component._edges:
self._add_edge(edge)

if component._components:
for _component in component._components:
self.add_component(_component._id, _component)
self.add_component(_component)

return component_id

def _set_start_and_end(self, start: "Component", end: "Component"):
if not hasattr(start, "to_frontend_node"):
raise TypeError(f"start must be a Component. Got {type(start)}")
if not hasattr(end, "to_frontend_node"):
raise TypeError(f"end must be a Component. Got {type(end)}")
self.add_component(start._id, start)
self.add_component(end._id, end)
self.add_component(start, start._id)
self.add_component(end, end._id)

def add_component_edge(self, source_id: str, output_input_tuple: tuple[str, str], target_id: str):
source_vertex = self.get_vertex(source_id)
Expand All @@ -242,6 +244,18 @@ def add_component_edge(self, source_id: str, output_input_tuple: tuple[str, str]
raise ValueError(f"Source vertex {source_id} does not have a custom component.")
if target_vertex._custom_component is None:
raise ValueError(f"Target vertex {target_id} does not have a custom component.")

try:
input_field = target_vertex.get_input(input_name)
input_types = input_field.input_types
input_field_type = str(input_field.field_type)
except ValueError:
input_field = target_vertex.data.get("node", {}).get("template", {}).get(input_name)
if not input_field:
raise ValueError(f"Input field {input_name} not found in target vertex {target_id}")
input_types = input_field.get("input_types", [])
input_field_type = input_field.get("type", "")

edge_data: EdgeData = {
"source": source_id,
"target": target_id,
Expand All @@ -256,8 +270,8 @@ def add_component_edge(self, source_id: str, output_input_tuple: tuple[str, str]
"targetHandle": {
"fieldName": input_name,
"id": target_vertex.id,
"inputTypes": target_vertex.get_input(input_name).input_types,
"type": str(target_vertex.get_input(input_name).field_type),
"inputTypes": input_types,
"type": input_field_type,
},
},
}
Expand Down Expand Up @@ -1397,7 +1411,7 @@ async def process(self, fallback_to_env_vars: bool, start_component_id: str | No
tasks.append(task)
vertex_task_run_count[vertex_id] = vertex_task_run_count.get(vertex_id, 0) + 1

logger.debug(f"Running layer {layer_index} with {len(tasks)} tasks")
logger.debug(f"Running layer {layer_index} with {len(tasks)} tasks, {current_batch}")
try:
next_runnable_vertices = await self._execute_tasks(tasks, lock=lock)
except Exception as e:
Expand Down Expand Up @@ -1463,6 +1477,8 @@ async def _execute_tasks(self, tasks: list[asyncio.Task], lock: asyncio.Lock) ->
# This could usually happen with input vertices like ChatInput
self.run_manager.remove_vertex_from_runnables(v.id)

logger.debug(f"Vertex {v.id}, result: {v._built_result}, object: {v._built_object}")

for v in vertices:
next_runnable_vertices = await self.get_next_runnable_vertices(lock, vertex=v, cache=False)
results.extend(next_runnable_vertices)
Expand Down
4 changes: 2 additions & 2 deletions src/backend/base/langflow/interface/initialize/loading.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,8 +112,8 @@ def update_params_with_load_from_db_fields(
try:
key = custom_component.variables(params[field], field)
except ValueError as e:
# check if "User id is not set" is in the error message
if "User id is not set" in str(e) and not fallback_to_env_vars:
# check if "User id is not set" is in the error message, this is an internal bug
if "User id is not set" in str(e):
raise e
logger.debug(str(e))
if fallback_to_env_vars and key is None:
Expand Down
3 changes: 1 addition & 2 deletions src/backend/base/langflow/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
create_or_update_starter_projects,
initialize_super_user_if_needed,
load_flows_from_directory,
download_nltk_resources,
)
from langflow.interface.types import get_and_cache_all_types_dict
from langflow.interface.utils import setup_llm_caching
Expand Down Expand Up @@ -182,7 +181,7 @@ async def exception_handler(request: Request, exc: Exception):
FastAPIInstrumentor.instrument_app(app)

# Get necessary NLTK packages
download_nltk_resources()
# download_nltk_resources()

return app

Expand Down
6 changes: 6 additions & 0 deletions src/backend/base/langflow/template/frontend_node/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -184,3 +184,9 @@ def set_field_value_in_template(self, field_name, value):
if field.name == field_name:
field.value = value
break

def set_field_load_from_db_in_template(self, field_name, value):
    """Set ``load_from_db`` on the first matching template field.

    Scans ``self.template.fields`` for a field whose name equals
    *field_name* AND that already has a ``load_from_db`` attribute; sets
    that attribute to *value* and stops. Name matches without the
    attribute are skipped, and a missing field is silently ignored.
    """
    for field in self.template.fields:
        if field.name != field_name:
            continue
        if not hasattr(field, "load_from_db"):
            continue
        field.load_from_db = value
        break
File renamed without changes.
34 changes: 34 additions & 0 deletions src/backend/tests/api_keys.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
import os.path

# Helpers for reading the API keys/secrets that integration tests require from the environment.


def get_required_env_var(var: str) -> str:
    """
    Look up *var* in the process environment and return its value.

    Args:
        var (str): Name of the environment variable to read.

    Returns:
        str: The non-empty value of the variable.

    Raises:
        ValueError: If the variable is unset or set to an empty string.
    """
    value = os.environ.get(var)
    if value:
        return value
    raise ValueError(f"Environment variable {var} is not set")


def get_openai_api_key() -> str:
    """Return the ``OPENAI_API_KEY`` env var; raises ValueError if unset/empty."""
    return get_required_env_var("OPENAI_API_KEY")


def get_astradb_application_token() -> str:
    """Return the ``ASTRA_DB_APPLICATION_TOKEN`` env var; raises ValueError if unset/empty."""
    return get_required_env_var("ASTRA_DB_APPLICATION_TOKEN")


def get_astradb_api_endpoint() -> str:
    """Return the ``ASTRA_DB_API_ENDPOINT`` env var; raises ValueError if unset/empty."""
    return get_required_env_var("ASTRA_DB_API_ENDPOINT")
3 changes: 3 additions & 0 deletions src/backend/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
from langflow.services.database.models.user.model import User, UserCreate
from langflow.services.database.utils import session_getter
from langflow.services.deps import get_db_service
from tests.api_keys import get_openai_api_key

if TYPE_CHECKING:
from langflow.services.database.service import DatabaseService
Expand Down Expand Up @@ -463,6 +464,8 @@ def get_starter_project(active_user):
if not flow:
raise ValueError("No starter project found")

# ensure openai api key is set
get_openai_api_key()
new_flow_create = FlowCreate(
name=flow.name,
description=flow.description,
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import pytest
from langflow.schema.message import Message
from tests.api_keys import get_openai_api_key
from tests.integration.utils import download_flow_from_github, run_json_flow


@pytest.mark.asyncio
@pytest.mark.api_key_required
async def test_1_0_15_basic_prompting():
    """Run the v1.0.15 'Basic Prompting' starter flow end-to-end against OpenAI.

    Requires OPENAI_API_KEY to be set (fails fast via get_openai_api_key);
    marked ``api_key_required`` so key-less CI runs can deselect it.
    """
    api_key = get_openai_api_key()
    # Fetch the released starter-flow JSON pinned to tag 1.0.15 so the test
    # exercises a stable artifact rather than the working tree.
    json_flow = download_flow_from_github("Basic Prompting (Hello, World)", "1.0.15")
    json_flow.set_value(json_flow.get_component_by_type("OpenAIModel"), "api_key", api_key)
    outputs = await run_json_flow(json_flow, run_input="my name is bob, say hello!")
    assert isinstance(outputs["message"], Message)
    response = outputs["message"].text.lower()
    # NOTE(review): presumably the 1.0.15 starter flow uses a pirate-themed
    # prompt, hence checking for "arr"/"ahoy" rather than "hello" — confirm
    # against the flow's prompt template.
    assert "arr" in response or "ahoy" in response
Empty file.
Loading

0 comments on commit 96872f3

Please sign in to comment.