From 11346f982d2687d7055338cee212c9e920186df1 Mon Sep 17 00:00:00 2001
From: Jason Kidd
Date: Wed, 17 Dec 2025 00:57:07 +0000
Subject: [PATCH] fix(azure_ai): return AzureAnthropicConfig for Claude models
 in get_provider_chat_config

Claude models on Azure AI were incorrectly using AzureAIStudioConfig,
causing tool calls to fail with invalid_request_error because tools
remained in OpenAI format instead of being transformed to Anthropic
format.
---
 litellm/utils.py                 |  2 ++
 tests/test_litellm/test_utils.py | 27 +++++++++++++++++++++++++++
 2 files changed, 29 insertions(+)

diff --git a/litellm/utils.py b/litellm/utils.py
index dfc0df5c9a2..49ab1744bfc 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -7258,6 +7258,8 @@ def get_provider_chat_config(  # noqa: PLR0915
                 return litellm.AzureOpenAIGPT5Config()
             return litellm.AzureOpenAIConfig()
         elif litellm.LlmProviders.AZURE_AI == provider:
+            if "claude" in model.lower():
+                return litellm.AzureAnthropicConfig()
             return litellm.AzureAIStudioConfig()
         elif litellm.LlmProviders.AZURE_TEXT == provider:
             return litellm.AzureOpenAITextConfig()
diff --git a/tests/test_litellm/test_utils.py b/tests/test_litellm/test_utils.py
index 2bd94488ba2..46b828e7940 100644
--- a/tests/test_litellm/test_utils.py
+++ b/tests/test_litellm/test_utils.py
@@ -2602,3 +2602,30 @@ def test_empty_list_content_returns_false(self):
         """Empty list content should return False."""
         message = {"role": "user", "content": []}
         assert is_cached_message(message) is False
+
+
+def test_azure_ai_claude_provider_config():
+    """Test that Azure AI Claude models return AzureAnthropicConfig for proper tool transformation."""
+    from litellm import AzureAnthropicConfig, AzureAIStudioConfig
+    from litellm.utils import ProviderConfigManager
+
+    # Claude models should return AzureAnthropicConfig
+    config = ProviderConfigManager.get_provider_chat_config(
+        model="claude-sonnet-4-5",
+        provider=LlmProviders.AZURE_AI,
+    )
+    assert isinstance(config, AzureAnthropicConfig)
+
+    # Test case-insensitive matching
+    config = ProviderConfigManager.get_provider_chat_config(
+        model="Claude-Opus-4",
+        provider=LlmProviders.AZURE_AI,
+    )
+    assert isinstance(config, AzureAnthropicConfig)
+
+    # Non-Claude models should return AzureAIStudioConfig
+    config = ProviderConfigManager.get_provider_chat_config(
+        model="mistral-large",
+        provider=LlmProviders.AZURE_AI,
+    )
+    assert isinstance(config, AzureAIStudioConfig)