Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
158 changes: 128 additions & 30 deletions litellm/llms/bedrock/chat/converse_transformation.py
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please document this parameter, @timelfrink.

Also, ideally, shouldn't we expose a consistent parameter name for such metadata values across the Gemini and Bedrock providers?

Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,77 @@ def get_config(cls):
and v is not None
}

def _validate_request_metadata(self, metadata: dict) -> None:
"""
Validate requestMetadata according to AWS Bedrock Converse API constraints.

Constraints:
- Maximum of 16 items
- Keys: 1-256 characters, pattern [a-zA-Z0-9\\s:_@$#=/+,-.]{1,256}
- Values: 0-256 characters, pattern [a-zA-Z0-9\\s:_@$#=/+,-.]{0,256}
"""
import re

if not isinstance(metadata, dict):
raise litellm.exceptions.BadRequestError(
message="request_metadata must be a dictionary",
model="bedrock",
llm_provider="bedrock",
)

if len(metadata) > 16:
raise litellm.exceptions.BadRequestError(
message="request_metadata can contain a maximum of 16 items",
model="bedrock",
llm_provider="bedrock",
)

key_pattern = re.compile(r'^[a-zA-Z0-9\s:_@$#=/+,.-]{1,256}$')
value_pattern = re.compile(r'^[a-zA-Z0-9\s:_@$#=/+,.-]{0,256}$')

for key, value in metadata.items():
if not isinstance(key, str):
raise litellm.exceptions.BadRequestError(
message="request_metadata keys must be strings",
model="bedrock",
llm_provider="bedrock",
)

if not isinstance(value, str):
raise litellm.exceptions.BadRequestError(
message="request_metadata values must be strings",
model="bedrock",
llm_provider="bedrock",
)

if len(key) == 0 or len(key) > 256:
raise litellm.exceptions.BadRequestError(
message="request_metadata key length must be 1-256 characters",
model="bedrock",
llm_provider="bedrock",
)

if len(value) > 256:
raise litellm.exceptions.BadRequestError(
message="request_metadata value length must be 0-256 characters",
model="bedrock",
llm_provider="bedrock",
)

if not key_pattern.match(key):
raise litellm.exceptions.BadRequestError(
message=f"request_metadata key '{key}' contains invalid characters. Allowed: [a-zA-Z0-9\\s:_@$#=/+,.-]",
model="bedrock",
llm_provider="bedrock",
)

if not value_pattern.match(value):
raise litellm.exceptions.BadRequestError(
message=f"request_metadata value '{value}' contains invalid characters. Allowed: [a-zA-Z0-9\\s:_@$#=/+,.-]",
model="bedrock",
llm_provider="bedrock",
)

def get_supported_openai_params(self, model: str) -> List[str]:
from litellm.utils import supports_function_calling

Expand All @@ -133,6 +204,7 @@ def get_supported_openai_params(self, model: str) -> List[str]:
"top_p",
"extra_headers",
"response_format",
"request_metadata",
]

if (
Expand Down Expand Up @@ -442,6 +514,10 @@ def map_openai_params(
optional_params["thinking"] = AnthropicConfig._map_reasoning_effort(
value
)
if param == "request_metadata":
if value is not None:
self._validate_request_metadata(value)
optional_params["request_metadata"] = value

# Only update thinking tokens for non-GPT-OSS models
if "gpt-oss" not in model:
Expand Down Expand Up @@ -632,34 +708,8 @@ def _handle_top_k_value(self, model: str, inference_params: dict) -> dict:

return {}

def _transform_request_helper(
self,
model: str,
system_content_blocks: List[SystemContentBlock],
optional_params: dict,
messages: Optional[List[AllMessageValues]] = None,
headers: Optional[dict] = None,
) -> CommonRequestObject:
## VALIDATE REQUEST
"""
Bedrock doesn't support tool calling without `tools=` param specified.
"""
if (
"tools" not in optional_params
and messages is not None
and has_tool_call_blocks(messages)
):
if litellm.modify_params:
optional_params["tools"] = add_dummy_tool(
custom_llm_provider="bedrock_converse"
)
else:
raise litellm.UnsupportedParamsError(
message="Bedrock doesn't support tool calling without `tools=` param specified. Pass `tools=` param OR set `litellm.modify_params = True` // `litellm_settings::modify_params: True` to add dummy tool to the request.",
model="",
llm_provider="bedrock",
)

def _prepare_request_params(self, optional_params: dict, model: str) -> tuple[dict, dict, dict]:
"""Prepare and separate request parameters."""
inference_params = copy.deepcopy(optional_params)
supported_converse_params = list(
AmazonConverseConfig.__annotations__.keys()
Expand All @@ -673,6 +723,11 @@ def _transform_request_helper(
)
inference_params.pop("json_mode", None) # used for handling json_schema

# Extract request_metadata before processing other parameters
request_metadata = inference_params.pop("request_metadata", None)
if request_metadata is not None:
self._validate_request_metadata(request_metadata)

# keep supported params in 'inference_params', and set all model-specific params in 'additional_request_params'
additional_request_params = {
k: v for k, v in inference_params.items() if k not in total_supported_params
Expand All @@ -686,9 +741,10 @@ def _transform_request_helper(
self._handle_top_k_value(model, inference_params)
)

original_tools = inference_params.pop("tools", [])
return inference_params, additional_request_params, request_metadata

# Initialize bedrock_tools
def _process_tools_and_beta(self, original_tools: list, model: str, headers: Optional[dict], additional_request_params: dict) -> tuple[List[ToolBlock], list]:
"""Process tools and collect anthropic_beta values."""
bedrock_tools: List[ToolBlock] = []

# Collect anthropic_beta values from user headers
Expand Down Expand Up @@ -730,6 +786,44 @@ def _transform_request_helper(
seen.add(beta)
additional_request_params["anthropic_beta"] = unique_betas

return bedrock_tools, anthropic_beta_list

def _transform_request_helper(
self,
model: str,
system_content_blocks: List[SystemContentBlock],
optional_params: dict,
messages: Optional[List[AllMessageValues]] = None,
headers: Optional[dict] = None,
) -> CommonRequestObject:
## VALIDATE REQUEST
"""
Bedrock doesn't support tool calling without `tools=` param specified.
"""
if (
"tools" not in optional_params
and messages is not None
and has_tool_call_blocks(messages)
):
if litellm.modify_params:
optional_params["tools"] = add_dummy_tool(
custom_llm_provider="bedrock_converse"
)
else:
raise litellm.UnsupportedParamsError(
message="Bedrock doesn't support tool calling without `tools=` param specified. Pass `tools=` param OR set `litellm.modify_params = True` // `litellm_settings::modify_params: True` to add dummy tool to the request.",
model="",
llm_provider="bedrock",
)

# Prepare and separate parameters
inference_params, additional_request_params, request_metadata = self._prepare_request_params(optional_params, model)

original_tools = inference_params.pop("tools", [])

# Process tools and collect beta values
bedrock_tools, anthropic_beta_list = self._process_tools_and_beta(original_tools, model, headers, additional_request_params)

bedrock_tool_config: Optional[ToolConfigBlock] = None
if len(bedrock_tools) > 0:
tool_choice_values: ToolChoiceValuesBlock = inference_params.pop(
Expand Down Expand Up @@ -759,6 +853,10 @@ def _transform_request_helper(
if bedrock_tool_config is not None:
data["toolConfig"] = bedrock_tool_config

# Request Metadata (top-level field)
if request_metadata is not None:
data["requestMetadata"] = request_metadata

return data

async def _async_transform_request(
Expand Down
8 changes: 2 additions & 6 deletions litellm/types/llms/bedrock.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,11 @@
import json
from typing import Any, List, Literal, Optional, Union
from typing import Any, Dict, List, Literal, Optional, Union

from typing_extensions import (
TYPE_CHECKING,
Protocol,
Required,
Self,
TypedDict,
TypeGuard,
get_origin,
override,
runtime_checkable,
)

from .openai import ChatCompletionToolCallChunk
Expand Down Expand Up @@ -225,6 +220,7 @@ class CommonRequestObject(
toolConfig: ToolConfigBlock
guardrailConfig: Optional[GuardrailConfigBlock]
performanceConfig: Optional[PerformanceConfigBlock]
requestMetadata: Optional[Dict[str, str]]


class RequestObject(CommonRequestObject, total=False):
Expand Down
Loading
Loading