From 61b7ef8ab21bfbaeada7a3990d3856f443614b7c Mon Sep 17 00:00:00 2001
From: Ali Mansoor
Date: Sat, 4 Oct 2025 07:45:09 +0500
Subject: [PATCH] fix: exclude temperature parameter for gpt-5 and similar
 models

- Added a safeguard to avoid passing `temperature` for models that do not support it
- Introduced `excluded_models_temperature` list (currently includes `gpt-5`)
- Maintains existing behavior for other models
- Resolves issue #3556

Reference: https://github.com/livekit/agents/issues/3556
---
 livekit-agents/livekit/agents/voice/run_result.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/livekit-agents/livekit/agents/voice/run_result.py b/livekit-agents/livekit/agents/voice/run_result.py
index c3c1b6d7fc..fb39ea1bd2 100644
--- a/livekit-agents/livekit/agents/voice/run_result.py
+++ b/livekit-agents/livekit/agents/voice/run_result.py
@@ -880,13 +880,18 @@ async def check_intent(success: bool, reason: str) -> tuple[bool, str]:
         )
 
         arguments: str | None = None
+        extra_kwargs = {}
+        excluded_models_temperature = ["gpt-5"]  # Add model names here to exclude temperature
+
+        if not any(excluded_model in llm_v.model for excluded_model in excluded_models_temperature):
+            extra_kwargs["temperature"] = 0.0
 
         # TODO(theomonnom): LLMStream should provide utilities to make function calling easier.
         async for chunk in llm_v.chat(
             chat_ctx=chat_ctx,
             tools=[check_intent],
             tool_choice={"type": "function", "function": {"name": "check_intent"}},
-            extra_kwargs={"temperature": 0.0},
+            extra_kwargs=extra_kwargs,
         ):
             if not chunk.delta:
                 continue