Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 1 addition & 21 deletions orchestrator/agentic_app.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@

if TYPE_CHECKING:
from pydantic_ai.models.openai import OpenAIModel
from pydantic_ai.toolsets import FunctionToolset

logger = get_logger(__name__)

Expand All @@ -38,27 +37,24 @@ def __init__(
*args: Any,
llm_settings: LLMSettings = llm_settings,
agent_model: "OpenAIModel | str | None" = None,
agent_tools: "list[FunctionToolset] | None" = None,
**kwargs: Any,
) -> None:
"""Initialize the `LLMOrchestratorCore` class.

This class extends `OrchestratorCore` with LLM features (search and agent).
It runs the search migration and mounts the agent endpoint based on feature flags.
It runs the search migration based on feature flags.

Args:
*args: All the normal arguments passed to the `OrchestratorCore` class.
llm_settings: A class of settings for the LLM
agent_model: Override the agent model (defaults to llm_settings.AGENT_MODEL)
agent_tools: A list of tools that can be used by the agent
**kwargs: Additional arguments passed to the `OrchestratorCore` class.

Returns:
None
"""
self.llm_settings = llm_settings
self.agent_model = agent_model or llm_settings.AGENT_MODEL
self.agent_tools = agent_tools

super().__init__(*args, **kwargs)

Expand All @@ -79,22 +75,6 @@ def __init__(
)
raise

# Mount agent endpoint if agent is enabled
if self.llm_settings.AGENT_ENABLED:
logger.info("Initializing agent features", model=self.agent_model)
try:
from orchestrator.search.agent import build_agent_router

agent_app = build_agent_router(self.agent_model, self.agent_tools)
self.mount("/agent", agent_app)
except ImportError as e:
logger.error(
"Unable to initialize agent features. Please install agent dependencies: "
"`pip install orchestrator-core[agent]`",
error=str(e),
)
raise


main_typer_app = typer.Typer()
main_typer_app.add_typer(cli_app, name="orchestrator", help="The orchestrator CLI commands")
Expand Down
5 changes: 5 additions & 0 deletions orchestrator/api/api_v1/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,3 +95,8 @@
# Core search endpoints; every route requires an authorized caller.
api_router.include_router(
    search.router, prefix="/search", tags=["Core", "Search"], dependencies=[Depends(authorize)]
)

# The agent router is registered only when the feature flag is on; the import
# is deferred to this branch so the agent module (and whatever it pulls in)
# is never loaded when the feature is disabled.
if llm_settings.AGENT_ENABLED:
    from orchestrator.api.api_v1.endpoints import agent

    api_router.include_router(agent.router, prefix="/agent", tags=["Core", "Agent"], dependencies=[Depends(authorize)])
50 changes: 50 additions & 0 deletions orchestrator/api/api_v1/endpoints/agent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
# Copyright 2019-2025 SURF, GÉANT.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from functools import cache
from typing import Annotated

from fastapi import APIRouter, Depends, Request
from pydantic_ai.ag_ui import StateDeps, handle_ag_ui_request
from pydantic_ai.agent import Agent
from starlette.responses import Response
from structlog import get_logger

from orchestrator.llm_settings import llm_settings
from orchestrator.search.agent import build_agent_instance
from orchestrator.search.agent.state import SearchState

router = APIRouter()
logger = get_logger(__name__)


@cache
def get_agent() -> Agent[StateDeps[SearchState], str]:
    """FastAPI dependency that returns the shared search agent.

    Construction is deferred until the first request and the resulting
    instance is memoized via ``functools.cache``, so a single agent is
    reused for the lifetime of the process.
    """
    return build_agent_instance(llm_settings.AGENT_MODEL)


@router.post("/")
async def agent_conversation(
    request: Request,
    agent: Annotated[Agent[StateDeps[SearchState], str], Depends(get_agent)],
) -> Response:
    """Handle one exchange of the interactive search-agent conversation.

    The raw HTTP request/response handling is delegated to pydantic-ai's
    ag_ui protocol handler; each call is seeded with a fresh ``SearchState``
    so conversations do not share state across requests.
    """
    state = SearchState()
    return await handle_ag_ui_request(agent, request, deps=StateDeps(state))
Loading