diff --git a/autogen/agentchat/groupchat.py b/autogen/agentchat/groupchat.py
index fe11ec171d25..bd5774cb1a62 100644
--- a/autogen/agentchat/groupchat.py
+++ b/autogen/agentchat/groupchat.py
@@ -7,12 +7,12 @@
 from dataclasses import dataclass, field
 from typing import Callable, Dict, List, Literal, Optional, Tuple, Union
 
-from ..oai.client import ModelClient
 from ..code_utils import content_str
 from ..exception_utils import AgentNameConflict, NoEligibleSpeaker, UndefinedNextAgent
 from ..formatting_utils import colored
 from ..graph_utils import check_graph_validity, invert_disallowed_to_allowed
 from ..io.base import IOStream
+from ..oai.client import ModelClient
 from ..runtime_logging import log_new_agent, logging_enabled
 from .agent import Agent
 from .conversable_agent import ConversableAgent
@@ -103,15 +103,15 @@ def custom_speaker_selection_func(
     messages: List[Dict]
     llm_config: Optional[Union[Dict, Literal[False]]] = None
     model_client_cls: Optional[Union[ModelClient, List[ModelClient]]] = None
-    max_round: Optional[int] = 10
-    admin_name: Optional[str] = "Admin"
-    func_call_filter: Optional[bool] = True
+    max_round: int = 10
+    admin_name: str = "Admin"
+    func_call_filter: bool = True
     speaker_selection_method: Union[Literal["auto", "manual", "random", "round_robin"], Callable] = "auto"
-    max_retries_for_selecting_speaker: Optional[int] = 2
+    max_retries_for_selecting_speaker: int = 2
     allow_repeat_speaker: Optional[Union[bool, List[Agent]]] = None
     allowed_or_disallowed_speaker_transitions: Optional[Dict] = None
     speaker_transitions_type: Literal["allowed", "disallowed", None] = None
-    enable_clear_history: Optional[bool] = False
+    enable_clear_history: bool = False
     send_introductions: bool = False
     select_speaker_message_template: str = """You are in a role play game. The following roles are available:
                 {roles}.
@@ -564,7 +564,7 @@ def _finalize_speaker(self, last_speaker: Agent, final: bool, name: str, agents:
         return agent if agent else self.next_agent(last_speaker, agents)
 
     def _register_client_from_config(self, agent: Agent, config: Dict):
-        model_client_cls_to_match = config.get('model_client_cls')
+        model_client_cls_to_match = config.get("model_client_cls")
         if model_client_cls_to_match:
             if not self.model_client_cls:
                 raise ValueError(
@@ -599,14 +599,13 @@ def _register_custom_model_clients(self, agent: ConversableAgent):
         if not self.llm_config:
             return
 
-        config_format_is_list = 'config_list' in self.llm_config.keys()
+        config_format_is_list = "config_list" in self.llm_config.keys()
         if config_format_is_list:
-            for config in self.llm_config['config_list']:
+            for config in self.llm_config["config_list"]:
                 self._register_client_from_config(agent, config)
         elif not config_format_is_list:
             self._register_client_from_config(agent, self.llm_config)
 
-
     def _auto_select_speaker(
         self,
         last_speaker: Agent,
diff --git a/website/docs/topics/groupchat/using_custom_models.md b/website/docs/topics/groupchat/using_custom_models.md
index 966e311ccc90..4948d8ab9f6e 100644
--- a/website/docs/topics/groupchat/using_custom_models.md
+++ b/website/docs/topics/groupchat/using_custom_models.md
@@ -1,11 +1,11 @@
 # Using Custom Models
 
-When using `GroupChatManager` we need to pass a `GroupChat` object in the constructor, a dataclass responsible for 
-gathering agents, preparing messages from prompt templates and selecting speakers 
+When using `GroupChatManager`, we need to pass a `GroupChat` object in the constructor, a dataclass responsible for
+gathering agents, preparing messages from prompt templates and selecting speakers
 (eventually using `speaker_selection_method` as described [here](customized_speaker_selection)).
 To do so GroupChat internally initializes two instances of ConversableAgent.
 
-In order to control the model clients used by the agents instantiated within the GroupChat, which already receives the 
+In order to control the model clients used by the agents instantiated within the GroupChat, which already receives the
 `llm_config` passed to GroupChatManager, the optional `model_client_cls` attribute can be set.
 
 
@@ -49,13 +49,13 @@ user_proxy = UserProxyAgent(name="user", llm_config=llm_config, code_execution_c
 user_proxy.register_model_client(MyModelClient)
 ```
 
-Note that the agents definition illustrated here is minimal and might not suit your needs. The only aim is to show a 
+Note that the agent definitions illustrated here are minimal and might not suit your needs. The only aim is to show a
 basic setup for a group chat scenario.
 
 We then create a `GroupChat` and, if we want the underlying agents used by GroupChat to use our
- custom client, we will pass it in the `model_client_cls` attribute.
+custom client, we will pass it in the `model_client_cls` attribute.
 
-Finally we create an instance of `GroupChatManager` and pass the config to it. This same config will be forwarded to 
+Finally, we create an instance of `GroupChatManager` and pass the config to it. This same config will be forwarded to
 the GroupChat, that (if needed) will automatically handle registration of custom models only.
 
 ```python
@@ -71,9 +71,9 @@ user_proxy.initiate_chat(chat_manager, initial_message="Suggest me the most tren
 ```
 
-This attribute can either be a class or a list of classes which adheres to the `ModelClient` protocol (see 
-[this link](../non-openai-models/about-using-nonopenai-models) for more info about defining a custom model client 
+This attribute can either be a class or a list of classes which adhere to the `ModelClient` protocol (see
+[this link](../non-openai-models/about-using-nonopenai-models) for more info about defining a custom model client
 class).
 
-Note that it is not necessary to define a `model_client_cls` when working with Azure OpenAI, OpenAI or other non-custom 
+Note that it is not necessary to define a `model_client_cls` when working with Azure OpenAI, OpenAI or other non-custom
 models natively supported by the library.
 
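For reviewers of this patch, the end-to-end wiring the updated documentation describes looks roughly like the sketch below. It is illustrative only and not part of the diff: `MyModelClient` is the placeholder class name used on the doc page, the agent names and the `"my-local-model"` config entry are assumptions, and the protocol method bodies are stubs rather than a working backend.

```python
# Sketch of the documented flow; names and config values are illustrative.
from autogen import AssistantAgent, GroupChat, GroupChatManager, UserProxyAgent


class MyModelClient:
    """Stand-in for a class implementing autogen's ModelClient protocol."""

    def __init__(self, config, **kwargs):
        self.config = config  # the matching entry from the config_list

    def create(self, params):
        raise NotImplementedError("call your model here and return a response object")

    def message_retrieval(self, response):
        return []  # extract the generated messages from the response

    def cost(self, response):
        return 0.0

    @staticmethod
    def get_usage(response):
        return {}


# "model_client_cls" must name the client class so it can be matched to this config entry.
llm_config = {"config_list": [{"model": "my-local-model", "model_client_cls": "MyModelClient"}]}

assistant = AssistantAgent(name="assistant", llm_config=llm_config)
user_proxy = UserProxyAgent(name="user", llm_config=llm_config, code_execution_config=False)

# Explicitly created agents register the custom client themselves.
assistant.register_model_client(MyModelClient)
user_proxy.register_model_client(MyModelClient)

# model_client_cls lets GroupChat register the same client on the agents it
# creates internally; the llm_config given to GroupChatManager is forwarded to it.
group_chat = GroupChat(agents=[assistant, user_proxy], messages=[], model_client_cls=MyModelClient)
chat_manager = GroupChatManager(groupchat=group_chat, llm_config=llm_config)
```

From here, `user_proxy.initiate_chat(chat_manager, message="...")` would drive the group chat once `create` returns real responses.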