Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions python/packages/openai/agent_framework_openai/_chat_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -2028,6 +2028,7 @@ def _parse_chunk_from_openai(
local_shell_tool_name = self._get_local_shell_tool_name(options.get("tools"))
conversation_id: str | None = None
response_id: str | None = None
created_at: str | None = None
continuation_token: OpenAIContinuationToken | None = None
model = self.model
match event.type:
Expand Down Expand Up @@ -2209,6 +2210,9 @@ def _parse_chunk_from_openai(
response_id = event.response.id
conversation_id = self._get_conversation_id(event.response, options.get("store"))
model = event.response.model
created_at = datetime.fromtimestamp(event.response.created_at, tz=timezone.utc).strftime(
"%Y-%m-%dT%H:%M:%S.%fZ"
)
if event.response.usage:
usage = self._parse_usage_from_openai(event.response.usage)
if usage:
Expand Down Expand Up @@ -2589,6 +2593,7 @@ def _get_ann_value(key: str) -> Any:
response_id=response_id,
role="assistant",
model=model,
created_at=created_at,
continuation_token=continuation_token,
additional_properties=metadata,
raw_representation=event,
Expand Down
24 changes: 24 additions & 0 deletions python/packages/openai/tests/openai/test_openai_chat_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -2192,6 +2192,7 @@ def test_streaming_chunk_with_usage_only() -> None:
mock_event.response.id = "resp_usage"
mock_event.response.model = "test-model"
mock_event.response.conversation = None
mock_event.response.created_at = 1000000000.0
mock_event.response.usage = MagicMock()
mock_event.response.usage.input_tokens = 50
mock_event.response.usage.output_tokens = 25
Expand Down Expand Up @@ -4438,13 +4439,36 @@ def test_streaming_response_completed_no_continuation_token() -> None:
mock_event.response.conversation = MagicMock()
mock_event.response.conversation.id = "conv_done"
mock_event.response.model = "test-model"
mock_event.response.created_at = 1000000000.0
mock_event.response.usage = None

update = client._parse_chunk_from_openai(mock_event, chat_options, function_call_ids)

assert update.continuation_token is None


def test_streaming_response_completed_sets_created_at() -> None:
    """Verify that a response.completed event populates created_at on the ChatResponseUpdate."""
    client = OpenAIChatClient(model="test-model", api_key="test-key")
    chat_options: dict[str, Any] = {}
    function_call_ids: dict[int, tuple[str, str]] = {}

    # Build a completed-response event whose created_at is a known epoch timestamp.
    mock_event = MagicMock()
    mock_event.type = "response.completed"
    response = MagicMock()
    response.id = "resp_created"
    response.conversation = MagicMock()
    response.conversation.id = "conv_created"
    response.model = "test-model"
    response.created_at = 1000000000.0
    response.usage = None
    mock_event.response = response

    update = client._parse_chunk_from_openai(mock_event, chat_options, function_call_ids)

    # 1000000000.0 epoch seconds is 2001-09-09T01:46:40 UTC.
    assert update.created_at is not None
    assert update.created_at == "2001-09-09T01:46:40.000000Z"


def test_map_chat_to_agent_update_preserves_continuation_token() -> None:
"""Test that map_chat_to_agent_update propagates continuation_token."""
from agent_framework._types import map_chat_to_agent_update
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from pydantic import Field

try:
import orjson
import orjson # pyright: ignore[reportMissingImports]
except ImportError:
orjson = None

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
from pydantic import Field

try:
import orjson
import orjson # pyright: ignore[reportMissingImports]
except ImportError:
orjson = None

Expand Down
Loading