Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 31 additions & 0 deletions tests/entrypoints/openai/test_response_api_with_harmony.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@
Message,
)

from vllm.entrypoints.openai.protocol import serialize_message, serialize_messages

from ...utils import RemoteOpenAIServer

MODEL_NAME = "openai/gpt-oss-20b"
Expand Down Expand Up @@ -758,3 +760,32 @@ async def test_output_messages_enabled(client: OpenAI, model_name: str, server):
assert response.status == "completed"
assert len(response.input_messages) > 0
assert len(response.output_messages) > 0


def test_serialize_message() -> None:
    """serialize_message passes dicts through unchanged and converts a
    harmony ``Message`` back to the exact dict it was built from."""
    dict_value = {"a": 1, "b": "2"}
    assert serialize_message(dict_value) == dict_value

    msg_value = {
        "role": "assistant",
        "name": None,
        "content": [{"type": "text", "text": "Test 1"}],
        "channel": "analysis",
    }
    # Round-trip: from_dict -> serialize_message should reproduce the input.
    msg = Message.from_dict(msg_value)
    assert serialize_message(msg) == msg_value


def test_serialize_messages() -> None:
    """serialize_messages normalizes empty input to None and serializes a
    mixed list of harmony Messages and plain dicts element-wise."""
    for empty in (None, []):
        assert serialize_messages(empty) is None

    plain = {"a": 3, "b": "4"}
    harmony_value = {
        "role": "assistant",
        "name": None,
        "content": [{"type": "text", "text": "Test 2"}],
        "channel": "analysis",
    }
    harmony_msg = Message.from_dict(harmony_value)
    assert serialize_messages([harmony_msg, plain]) == [harmony_value, plain]
46 changes: 22 additions & 24 deletions vllm/entrypoints/openai/protocol.py
Original file line number Diff line number Diff line change
Expand Up @@ -2069,6 +2069,26 @@ class ResponseUsage(OpenAIBaseModel):
total_tokens: int


def serialize_message(msg):
    """Serialize a single message to a JSON-compatible dict.

    Three cases:
      * plain dicts are returned unchanged;
      * harmony ``Message``-like objects (anything exposing ``to_dict``)
        are converted via ``to_dict()``;
      * otherwise fall back to pydantic's ``model_dump()``.
    """
    if isinstance(msg, dict):
        return msg
    # Probe for the conversion method itself, not ``__dict__``: almost every
    # object (pydantic models included) has ``__dict__``, so the old check
    # crashed with AttributeError on objects lacking ``to_dict`` and made the
    # pydantic fallback unreachable.
    elif hasattr(msg, "to_dict"):
        return msg.to_dict()
    else:
        # ``model_dump()`` (not ``model_dump_json()``) so the result is a
        # dict, consistent with the other branches; returning a JSON string
        # here would get double-encoded by the field serializer.
        return msg.model_dump()


def serialize_messages(msgs):
    """Serialize a sequence of messages element-wise.

    Returns None when *msgs* is None or empty, mirroring the optional
    message-list fields it serializes.
    """
    if not msgs:
        return None
    return [serialize_message(item) for item in msgs]


class ResponsesResponse(OpenAIBaseModel):
id: str = Field(default_factory=lambda: f"resp_{random_uuid()}")
created_at: int = Field(default_factory=lambda: int(time.time()))
Expand Down Expand Up @@ -2111,35 +2131,13 @@ class ResponsesResponse(OpenAIBaseModel):
# https://github.com/openai/harmony/issues/78
@field_serializer("output_messages", when_used="json")
def serialize_output_messages(self, msgs, _info):
if msgs:
serialized = []
for m in msgs:
if isinstance(m, dict):
serialized.append(m)
elif hasattr(m, "__dict__"):
serialized.append(m.to_dict())
else:
# fallback to pyandic dump
serialized.append(m.model_dump_json())
return serialized
return None
return serialize_messages(msgs)

# NOTE: openAI harmony doesn't serialize TextContent properly, this fixes it
# https://github.com/openai/harmony/issues/78
@field_serializer("input_messages", when_used="json")
def serialize_input_messages(self, msgs, _info):
if msgs:
serialized = []
for m in msgs:
if isinstance(m, dict):
serialized.append(m)
elif hasattr(m, "__dict__"):
serialized.append(m.to_dict())
else:
# fallback to pyandic dump
serialized.append(m.model_dump_json())
return serialized
return None
return serialize_messages(msgs)

@classmethod
def from_request(
Expand Down
Loading