@@ -196,7 +196,7 @@ def run( # type: ignore[override]
messages: str | ChatMessage | Sequence[str | ChatMessage] | None = None,
*,
thread: AgentThread | None = None,
- response_format: type[BaseModel] | None = None,
+ options: dict[str, Any] | None = None,
**kwargs: Any,
) -> AgentTask:
"""Execute the agent with messages and return an AgentTask for orchestrations.
@@ -208,7 +208,7 @@ def run( # type: ignore[override]
Args:
messages: The message(s) to send to the agent
thread: Optional agent thread for conversation context
- response_format: Optional Pydantic model for response parsing
+ options: Optional dict containing chat options like response_format, tools, etc.
**kwargs: Additional arguments (enable_tool_calls)

Returns:
@@ -219,13 +219,15 @@ def run( # type: ignore[override]
def my_orchestration(context):
agent = app.get_agent(context, "MyAgent")
thread = agent.get_new_thread()
response = yield agent.run("Hello", thread=thread)
response = yield agent.run("Hello", thread=thread, options={"response_format": MyModel})
# response is typed as AgentResponse
"""
message_str = self._normalize_messages(messages)

- # Extract optional parameters from kwargs
- enable_tool_calls = kwargs.get("enable_tool_calls", True)
+ # Extract options from the options dict (aligned with ChatAgent pattern)
+ opts = options or {}
+ response_format: type[BaseModel] | None = opts.get("response_format")
+ enable_tool_calls = opts.get("enable_tool_calls", kwargs.get("enable_tool_calls", True))

# Get the session ID for the entity
if isinstance(thread, DurableAgentThread) and thread.session_id is not None:
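
The hunk above changes how structured-output and tool-call settings reach the agent: instead of a dedicated response_format parameter, callers pass an options dict, with enable_tool_calls still honored from **kwargs as a fallback. Below is a minimal standalone sketch of that resolution order; resolve_options and MyModel are hypothetical names used only for illustration here and are not part of the package API.

from typing import Any

from pydantic import BaseModel


class MyModel(BaseModel):
    answer: str


def resolve_options(options: dict[str, Any] | None = None, **kwargs: Any) -> tuple[type[BaseModel] | None, bool]:
    # Standalone illustration mirroring the extraction logic in the diff:
    # the options dict wins, then kwargs, then the default of enabling tool calls.
    opts = options or {}
    response_format: type[BaseModel] | None = opts.get("response_format")
    enable_tool_calls = opts.get("enable_tool_calls", kwargs.get("enable_tool_calls", True))
    return response_format, enable_tool_calls


assert resolve_options({"response_format": MyModel}) == (MyModel, True)
assert resolve_options(enable_tool_calls=False) == (None, False)
assert resolve_options({"enable_tool_calls": False}, enable_tool_calls=True) == (None, False)
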
@@ -364,7 +364,7 @@ class SampleSchema(BaseModel):
# Create thread and call
thread = agent.get_new_thread()

- task = agent.run(messages="Test message", thread=thread, response_format=SampleSchema)
+ task = agent.run(messages="Test message", thread=thread, options={"response_format": SampleSchema})

assert isinstance(task, AgentTask)
assert task.children[0] == entity_task
python/packages/core/tests/azure/test_azure_chat_client.py (6 changes: 3 additions & 3 deletions)
@@ -657,7 +657,7 @@ async def test_azure_openai_chat_client_response_tools() -> None:

assert response is not None
assert isinstance(response, ChatResponse)
assert "scientists" in response.text
assert "Emily" in response.text or "David" in response.text


@pytest.mark.flaky
@@ -692,7 +692,7 @@ async def test_azure_openai_chat_client_streaming() -> None:
if isinstance(content, TextContent) and content.text:
full_message += content.text

assert "scientists" in full_message
assert "Emily" in full_message or "David" in full_message


@pytest.mark.flaky
@@ -718,7 +718,7 @@ async def test_azure_openai_chat_client_streaming_tools() -> None:
if isinstance(content, TextContent) and content.text:
full_message += content.text

assert "scientists" in full_message
assert "Emily" in full_message or "David" in full_message


@pytest.mark.flaky