Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 22 additions & 2 deletions litellm/llms/openai_like/dynamic_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

from typing import Any, Coroutine, List, Literal, Optional, Tuple, Union, overload

from litellm._logging import verbose_logger
from litellm.litellm_core_utils.prompt_templates.common_utils import (
handle_messages_with_content_list_to_str_conversion,
)
Expand Down Expand Up @@ -96,8 +97,27 @@ def get_complete_url(
return api_base

def get_supported_openai_params(self, model: str) -> list:
    """Get supported OpenAI params, excluding tool-related params for models
    that don't support function calling.

    Args:
        model: Model name to check capabilities for.

    Returns:
        List of supported OpenAI parameter names for ``model``, with
        tool/function-calling params removed when the model lacks
        function-calling support.
    """
    # Local import to avoid a circular import at module load time.
    from litellm.utils import supports_function_calling

    supported_params = super().get_supported_openai_params(model=model)

    # NOTE(review): `provider` is presumably captured from the enclosing
    # create_config_class scope — confirm against the factory function.
    _supports_fc = supports_function_calling(
        model=model, custom_llm_provider=provider.slug
    )

    if not _supports_fc:
        # Strip every tool/function-calling parameter the base class
        # advertises; membership test against a set, order preserved.
        tool_params = {
            "tools",
            "tool_choice",
            "function_call",
            "functions",
            "parallel_tool_calls",
        }
        supported_params = [p for p in supported_params if p not in tool_params]
        verbose_logger.debug(
            f"Model {model} on provider {provider.slug} does not support "
            f"function calling — removed tool-related params from supported params."
        )

    return supported_params

def map_openai_params(
self,
Expand Down
41 changes: 41 additions & 0 deletions tests/test_litellm/llms/openai_like/test_json_providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,47 @@ def test_supported_params(self):
assert isinstance(supported, list)
assert len(supported) > 0

def test_tool_params_excluded_when_function_calling_not_supported(self):
    """Tool-related params must be dropped for models without function-calling
    support. Regression test for https://github.com/BerriAI/litellm/issues/21125"""
    from litellm.llms.openai_like.dynamic_config import create_config_class
    from litellm.llms.openai_like.json_loader import JSONProviderRegistry

    provider_cfg = JSONProviderRegistry.get("publicai")
    config = create_config_class(provider_cfg)()

    # Force the capability lookup to report "no function calling".
    with patch("litellm.utils.supports_function_calling", return_value=False):
        supported = config.get_supported_openai_params("some-model-without-fc")

    # Every tool/function-calling param must have been filtered out.
    for param in ("tools", "tool_choice", "function_call", "functions", "parallel_tool_calls"):
        assert param not in supported, (
            f"'{param}' should not be in supported params when function calling is not supported"
        )

    # Non-tool params must survive the filtering untouched.
    for kept in ("temperature", "max_tokens", "stop"):
        assert kept in supported

def test_tool_params_included_when_function_calling_supported(self):
    """Models that support function calling must keep tool-related params."""
    from litellm.llms.openai_like.dynamic_config import create_config_class
    from litellm.llms.openai_like.json_loader import JSONProviderRegistry

    provider_cfg = JSONProviderRegistry.get("publicai")
    config = create_config_class(provider_cfg)()

    # Force the capability lookup to report function-calling support.
    with patch("litellm.utils.supports_function_calling", return_value=True):
        supported = config.get_supported_openai_params("some-model-with-fc")

    # Tool params must remain advertised.
    for tool_param in ("tools", "tool_choice"):
        assert tool_param in supported

def test_provider_resolution(self):
"""Test that provider resolution finds JSON providers"""
from litellm.litellm_core_utils.get_llm_provider_logic import (
Expand Down
Loading