Skip to content

Commit 73c1436

Browse files
zastrowm and Vamil Gandhi
authored and committed
feat: Warn on unknown model configuration properties (strands-agents#819)
Implement the ability for all built-in providers to emit a warning when unknown configuration properties are included. Co-authored-by: Mackenzie Zastrow <[email protected]>
1 parent 192413a commit 73c1436

20 files changed

+250
-0
lines changed
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
"""Configuration validation utilities for model providers."""
2+
3+
import warnings
4+
from typing import Any, Mapping, Type
5+
6+
from typing_extensions import get_type_hints
7+
8+
9+
def validate_config_keys(config_dict: Mapping[str, Any], config_class: Type) -> None:
    """Warn when configuration keys do not match the TypedDict fields.

    Compares the provided keys against the type hints declared on the
    TypedDict ``config_class`` and emits a ``UserWarning`` listing any
    unrecognized keys. Valid configurations produce no warning.

    Args:
        config_dict: Dictionary of configuration parameters
        config_class: TypedDict class to validate against
    """
    known_keys = set(get_type_hints(config_class))
    unknown_keys = {key for key in config_dict if key not in known_keys}

    if not unknown_keys:
        return

    message = (
        f"Invalid configuration parameters: {sorted(unknown_keys)}."
        f"\nValid parameters are: {sorted(known_keys)}."
        "\n"
        "\nSee https://github.com/strands-agents/sdk-python/issues/815"
    )
    # stacklevel=4 attributes the warning to the user code that constructed
    # or reconfigured the model, not to this helper or its caller.
    warnings.warn(message, stacklevel=4)

src/strands/models/anthropic.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
from ..types.exceptions import ContextWindowOverflowException, ModelThrottledException
2020
from ..types.streaming import StreamEvent
2121
from ..types.tools import ToolSpec
22+
from ._config_validation import validate_config_keys
2223
from .model import Model
2324

2425
logger = logging.getLogger(__name__)
@@ -67,6 +68,7 @@ def __init__(self, *, client_args: Optional[dict[str, Any]] = None, **model_conf
6768
For a complete list of supported arguments, see https://docs.anthropic.com/en/api/client-sdks.
6869
**model_config: Configuration options for the Anthropic model.
6970
"""
71+
validate_config_keys(model_config, self.AnthropicConfig)
7072
self.config = AnthropicModel.AnthropicConfig(**model_config)
7173

7274
logger.debug("config=<%s> | initializing", self.config)
@@ -81,6 +83,7 @@ def update_config(self, **model_config: Unpack[AnthropicConfig]) -> None: # typ
8183
Args:
8284
**model_config: Configuration overrides.
8385
"""
86+
validate_config_keys(model_config, self.AnthropicConfig)
8487
self.config.update(model_config)
8588

8689
@override

src/strands/models/bedrock.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@
2424
)
2525
from ..types.streaming import CitationsDelta, StreamEvent
2626
from ..types.tools import ToolResult, ToolSpec
27+
from ._config_validation import validate_config_keys
2728
from .model import Model
2829

2930
logger = logging.getLogger(__name__)
@@ -166,6 +167,7 @@ def update_config(self, **model_config: Unpack[BedrockConfig]) -> None: # type:
166167
Args:
167168
**model_config: Configuration overrides.
168169
"""
170+
validate_config_keys(model_config, self.BedrockConfig)
169171
self.config.update(model_config)
170172

171173
@override

src/strands/models/litellm.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
from ..types.content import ContentBlock, Messages
1616
from ..types.streaming import StreamEvent
1717
from ..types.tools import ToolSpec
18+
from ._config_validation import validate_config_keys
1819
from .openai import OpenAIModel
1920

2021
logger = logging.getLogger(__name__)
@@ -49,6 +50,7 @@ def __init__(self, client_args: Optional[dict[str, Any]] = None, **model_config:
4950
**model_config: Configuration options for the LiteLLM model.
5051
"""
5152
self.client_args = client_args or {}
53+
validate_config_keys(model_config, self.LiteLLMConfig)
5254
self.config = dict(model_config)
5355

5456
logger.debug("config=<%s> | initializing", self.config)
@@ -60,6 +62,7 @@ def update_config(self, **model_config: Unpack[LiteLLMConfig]) -> None: # type:
6062
Args:
6163
**model_config: Configuration overrides.
6264
"""
65+
validate_config_keys(model_config, self.LiteLLMConfig)
6366
self.config.update(model_config)
6467

6568
@override

src/strands/models/llamaapi.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
from ..types.exceptions import ModelThrottledException
2020
from ..types.streaming import StreamEvent, Usage
2121
from ..types.tools import ToolResult, ToolSpec, ToolUse
22+
from ._config_validation import validate_config_keys
2223
from .model import Model
2324

2425
logger = logging.getLogger(__name__)
@@ -60,6 +61,7 @@ def __init__(
6061
client_args: Arguments for the Llama API client.
6162
**model_config: Configuration options for the Llama API model.
6263
"""
64+
validate_config_keys(model_config, self.LlamaConfig)
6365
self.config = LlamaAPIModel.LlamaConfig(**model_config)
6466
logger.debug("config=<%s> | initializing", self.config)
6567

@@ -75,6 +77,7 @@ def update_config(self, **model_config: Unpack[LlamaConfig]) -> None: # type: i
7577
Args:
7678
**model_config: Configuration overrides.
7779
"""
80+
validate_config_keys(model_config, self.LlamaConfig)
7881
self.config.update(model_config)
7982

8083
@override

src/strands/models/mistral.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
from ..types.exceptions import ModelThrottledException
1717
from ..types.streaming import StopReason, StreamEvent
1818
from ..types.tools import ToolResult, ToolSpec, ToolUse
19+
from ._config_validation import validate_config_keys
1920
from .model import Model
2021

2122
logger = logging.getLogger(__name__)
@@ -82,6 +83,7 @@ def __init__(
8283
if not 0.0 <= top_p <= 1.0:
8384
raise ValueError(f"top_p must be between 0.0 and 1.0, got {top_p}")
8485

86+
validate_config_keys(model_config, self.MistralConfig)
8587
self.config = MistralModel.MistralConfig(**model_config)
8688

8789
# Set default stream to True if not specified
@@ -101,6 +103,7 @@ def update_config(self, **model_config: Unpack[MistralConfig]) -> None: # type:
101103
Args:
102104
**model_config: Configuration overrides.
103105
"""
106+
validate_config_keys(model_config, self.MistralConfig)
104107
self.config.update(model_config)
105108

106109
@override

src/strands/models/ollama.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
from ..types.content import ContentBlock, Messages
1515
from ..types.streaming import StopReason, StreamEvent
1616
from ..types.tools import ToolSpec
17+
from ._config_validation import validate_config_keys
1718
from .model import Model
1819

1920
logger = logging.getLogger(__name__)
@@ -70,6 +71,7 @@ def __init__(
7071
"""
7172
self.host = host
7273
self.client_args = ollama_client_args or {}
74+
validate_config_keys(model_config, self.OllamaConfig)
7375
self.config = OllamaModel.OllamaConfig(**model_config)
7476

7577
logger.debug("config=<%s> | initializing", self.config)
@@ -81,6 +83,7 @@ def update_config(self, **model_config: Unpack[OllamaConfig]) -> None: # type:
8183
Args:
8284
**model_config: Configuration overrides.
8385
"""
86+
validate_config_keys(model_config, self.OllamaConfig)
8487
self.config.update(model_config)
8588

8689
@override

src/strands/models/openai.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717
from ..types.content import ContentBlock, Messages
1818
from ..types.streaming import StreamEvent
1919
from ..types.tools import ToolResult, ToolSpec, ToolUse
20+
from ._config_validation import validate_config_keys
2021
from .model import Model
2122

2223
logger = logging.getLogger(__name__)
@@ -61,6 +62,7 @@ def __init__(self, client_args: Optional[dict[str, Any]] = None, **model_config:
6162
For a complete list of supported arguments, see https://pypi.org/project/openai/.
6263
**model_config: Configuration options for the OpenAI model.
6364
"""
65+
validate_config_keys(model_config, self.OpenAIConfig)
6466
self.config = dict(model_config)
6567

6668
logger.debug("config=<%s> | initializing", self.config)
@@ -75,6 +77,7 @@ def update_config(self, **model_config: Unpack[OpenAIConfig]) -> None: # type:
7577
Args:
7678
**model_config: Configuration overrides.
7779
"""
80+
validate_config_keys(model_config, self.OpenAIConfig)
7881
self.config.update(model_config)
7982

8083
@override

src/strands/models/sagemaker.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
from ..types.content import ContentBlock, Messages
1616
from ..types.streaming import StreamEvent
1717
from ..types.tools import ToolResult, ToolSpec
18+
from ._config_validation import validate_config_keys
1819
from .openai import OpenAIModel
1920

2021
T = TypeVar("T", bound=BaseModel)
@@ -146,6 +147,8 @@ def __init__(
146147
boto_session: Boto Session to use when calling the SageMaker Runtime.
147148
boto_client_config: Configuration to use when creating the SageMaker-Runtime Boto Client.
148149
"""
150+
validate_config_keys(endpoint_config, self.SageMakerAIEndpointConfig)
151+
validate_config_keys(payload_config, self.SageMakerAIPayloadSchema)
149152
payload_config.setdefault("stream", True)
150153
payload_config.setdefault("tool_results_as_user_messages", False)
151154
self.endpoint_config = dict(endpoint_config)
@@ -180,6 +183,7 @@ def update_config(self, **endpoint_config: Unpack[SageMakerAIEndpointConfig]) ->
180183
Args:
181184
**endpoint_config: Configuration overrides.
182185
"""
186+
validate_config_keys(endpoint_config, self.SageMakerAIEndpointConfig)
183187
self.endpoint_config.update(endpoint_config)
184188

185189
@override

src/strands/models/writer.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717
from ..types.exceptions import ModelThrottledException
1818
from ..types.streaming import StreamEvent
1919
from ..types.tools import ToolResult, ToolSpec, ToolUse
20+
from ._config_validation import validate_config_keys
2021
from .model import Model
2122

2223
logger = logging.getLogger(__name__)
@@ -53,6 +54,7 @@ def __init__(self, client_args: Optional[dict[str, Any]] = None, **model_config:
5354
client_args: Arguments for the Writer client (e.g., api_key, base_url, timeout, etc.).
5455
**model_config: Configuration options for the Writer model.
5556
"""
57+
validate_config_keys(model_config, self.WriterConfig)
5658
self.config = WriterModel.WriterConfig(**model_config)
5759

5860
logger.debug("config=<%s> | initializing", self.config)
@@ -67,6 +69,7 @@ def update_config(self, **model_config: Unpack[WriterConfig]) -> None: # type:
6769
Args:
6870
**model_config: Configuration overrides.
6971
"""
72+
validate_config_keys(model_config, self.WriterConfig)
7073
self.config.update(model_config)
7174

7275
@override

0 commit comments

Comments
 (0)