diff --git a/agents-api/agents_api/queries/docs/list_docs.py b/agents-api/agents_api/queries/docs/list_docs.py
index 852149934..2a1d3f214 100644
--- a/agents-api/agents_api/queries/docs/list_docs.py
+++ b/agents-api/agents_api/queries/docs/list_docs.py
@@ -41,6 +41,7 @@
 AND doc_own.owner_id = $4
 """
 
+
 @rewrap_exceptions(common_db_exceptions("doc", ["list"]))
 @wrap_in_class(
     Doc,
@@ -117,7 +118,9 @@ async def list_docs(
         d.created_at"""
 
     # Add sorting and pagination
-    query += f" ORDER BY {sort_by} {direction} LIMIT ${len(params) + 1} OFFSET ${len(params) + 2}"
+    query += (
+        f" ORDER BY {sort_by} {direction} LIMIT ${len(params) + 1} OFFSET ${len(params) + 2}"
+    )
     params.extend([limit, offset])
 
     return query, params
diff --git a/agents-api/agents_api/queries/executions/create_execution_transition.py b/agents-api/agents_api/queries/executions/create_execution_transition.py
index fb4d0b2b0..98d48e5e9 100644
--- a/agents-api/agents_api/queries/executions/create_execution_transition.py
+++ b/agents-api/agents_api/queries/executions/create_execution_transition.py
@@ -11,7 +11,7 @@
 from ...common.utils.datetime import utcnow
 from ...common.utils.db_exceptions import common_db_exceptions
 from ...metrics.counters import increase_counter
-from ..utils import pg_query, rewrap_exceptions, wrap_in_class
+from ..utils import pg_query, rewrap_exceptions, serialize_model_data, wrap_in_class
 
 # Query to create a transition
 create_execution_transition_query = """
@@ -121,14 +121,7 @@ async def create_execution_transition(
     data.execution_id = execution_id
 
     # Dump to json
-    if isinstance(data.output, list):
-        data.output = [
-            item.model_dump(mode="json") if hasattr(item, "model_dump") else item
-            for item in data.output
-        ]
-
-    elif hasattr(data.output, "model_dump"):
-        data.output = data.output.model_dump(mode="json")
+    data.output = serialize_model_data(data.output)
 
     # Prepare the transition data
     transition_data = data.model_dump(exclude_unset=True, exclude={"id"})
diff --git a/agents-api/agents_api/queries/utils.py b/agents-api/agents_api/queries/utils.py
index b0c7eda45..6ff284f74 100644
--- a/agents-api/agents_api/queries/utils.py
+++ b/agents-api/agents_api/queries/utils.py
@@ -297,3 +297,22 @@ def run_concurrently(
     ]
 
     return [future.result() for future in concurrent.futures.as_completed(futures)]
+
+
+def serialize_model_data(data: Any) -> Any:
+    """
+    Recursively serialize Pydantic models and their nested structures.
+
+    Args:
+        data: Any data structure that might contain Pydantic models
+
+    Returns:
+        JSON-serializable data structure
+    """
+    if hasattr(data, "model_dump"):
+        return data.model_dump(mode="json")
+    if isinstance(data, dict):
+        return {key: serialize_model_data(value) for key, value in data.items()}
+    if isinstance(data, list | tuple):
+        return [serialize_model_data(item) for item in data]
+    return data
diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py
index 1458bf915..cf4fd2f7d 100644
--- a/agents-api/tests/fixtures.py
+++ b/agents-api/tests/fixtures.py
@@ -233,7 +233,7 @@ async def test_doc_with_embedding(dsn=pg_dsn, developer=test_developer, doc=test
     # ensuring it's up-to-date before executing queries.
     # This can be achieved by executing a REINDEX command
     await pool.execute("REINDEX DATABASE")
-    
+
     yield await get_doc(developer_id=developer.id, doc_id=doc.id, connection_pool=pool)
 
 
diff --git a/agents-api/tests/test_docs_queries.py b/agents-api/tests/test_docs_queries.py
index cb52b6df7..dc921b4e9 100644
--- a/agents-api/tests/test_docs_queries.py
+++ b/agents-api/tests/test_docs_queries.py
@@ -145,6 +145,7 @@ async def _(dsn=pg_dsn, developer=test_developer, user=test_user):
     assert any(d.id == doc_user_different_metadata.id for d in docs_list_metadata)
     assert any(d.metadata == {"test": "test2"} for d in docs_list_metadata)
 
+
 @test("query: list agent docs")
 async def _(dsn=pg_dsn, developer=test_developer, agent=test_agent):
     pool = await create_db_pool(dsn=dsn)
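Note for reviewers (illustrative, not part of the patch): a minimal sketch of how the new serialize_model_data helper is expected to behave, based on its docstring and body above. The Item model and the sample data are hypothetical.

    from pydantic import BaseModel

    from agents_api.queries.utils import serialize_model_data

    class Item(BaseModel):
        name: str

    # Anything exposing model_dump becomes a plain dict via model_dump(mode="json");
    # dicts, lists and tuples are walked recursively; other values pass through unchanged.
    print(serialize_model_data({"items": [Item(name="a"), Item(name="b")], "count": 2}))
    # {'items': [{'name': 'a'}, {'name': 'b'}], 'count': 2}

This covers the list case previously handled inline in create_execution_transition as well as nested dicts and tuples, which the old inline code did not recurse into.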