From 9d7eca6d771a0efcbc5bf09e8244b712ec7bbfd6 Mon Sep 17 00:00:00 2001
From: Chi Wang
Date: Fri, 2 Feb 2024 14:07:00 +0000
Subject: [PATCH] change code_execution_config default

---
 autogen/agentchat/assistant_agent.py   | 2 --
 autogen/agentchat/conversable_agent.py | 5 +++--
 autogen/version.py                     | 2 +-
 3 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/autogen/agentchat/assistant_agent.py b/autogen/agentchat/assistant_agent.py
index ffb2e598d658..bdec0fef6656 100644
--- a/autogen/agentchat/assistant_agent.py
+++ b/autogen/agentchat/assistant_agent.py
@@ -37,7 +37,6 @@ def __init__(
         is_termination_msg: Optional[Callable[[Dict], bool]] = None,
         max_consecutive_auto_reply: Optional[int] = None,
         human_input_mode: Optional[str] = "NEVER",
-        code_execution_config: Optional[Union[Dict, Literal[False]]] = False,
         description: Optional[str] = None,
         **kwargs,
     ):
@@ -64,7 +63,6 @@ def __init__(
             is_termination_msg,
             max_consecutive_auto_reply,
             human_input_mode,
-            code_execution_config=code_execution_config,
             llm_config=llm_config,
             description=description,
             **kwargs,
diff --git a/autogen/agentchat/conversable_agent.py b/autogen/agentchat/conversable_agent.py
index 348f14f54a0b..ecb1c996599c 100644
--- a/autogen/agentchat/conversable_agent.py
+++ b/autogen/agentchat/conversable_agent.py
@@ -70,7 +70,7 @@ def __init__(
         max_consecutive_auto_reply: Optional[int] = None,
         human_input_mode: Optional[str] = "TERMINATE",
         function_map: Optional[Dict[str, Callable]] = None,
-        code_execution_config: Union[Dict, Literal[False]] = {},
+        code_execution_config: Union[Dict, Literal[False]] = False,
         llm_config: Optional[Union[Dict, Literal[False]]] = None,
         default_auto_reply: Optional[Union[str, Dict, None]] = "",
         description: Optional[str] = None,
@@ -108,7 +108,8 @@ def __init__(
                 If False, the code will be executed in the current environment.
                 We strongly recommend using docker for code execution.
                 - timeout (Optional, int): The maximum execution time in seconds.
-                - last_n_messages (Experimental, Optional, int or str): The number of messages to look back for code execution. If set to 'auto', it will scan backwards through all messages arriving since the agent last spoke, which is typically the last time execution was attempted. (Default: auto)
+                - last_n_messages (Experimental, int or str): The number of messages to look back for code execution.
+                    If set to 'auto', it will scan backwards through all messages arriving since the agent last spoke, which is typically the last time execution was attempted. (Default: auto)
             llm_config (dict or False): llm inference configuration.
                 Please refer to [OpenAIWrapper.create](/docs/reference/oai/client#create)
                 for available options.
diff --git a/autogen/version.py b/autogen/version.py
index 75cf7831c499..6232f7ab18d0 100644
--- a/autogen/version.py
+++ b/autogen/version.py
@@ -1 +1 @@
-__version__ = "0.2.9"
+__version__ = "0.2.10"
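
Illustration for reviewers (not part of the diff): after this change, a ConversableAgent built without arguments no longer executes code, and AssistantAgent simply inherits that default rather than forwarding its own. The sketch below shows an opt-in call site under the 0.2.10 API; the agent names, work_dir value, and the choice of llm_config=False are illustrative assumptions, not taken from the patch.

    from autogen import ConversableAgent

    # Code execution must now be enabled explicitly; the old default ({})
    # turned it on implicitly for every ConversableAgent.
    executor = ConversableAgent(
        name="executor",          # illustrative name
        llm_config=False,         # no LLM needed for a pure executor agent
        human_input_mode="NEVER",
        code_execution_config={
            "work_dir": "coding",        # illustrative working directory
            "use_docker": True,          # recommended per the docstring above
            "last_n_messages": "auto",   # scan back to the agent's last turn
        },
    )

    # Agents constructed without the argument keep the new default (False)
    # and will not run code blocks they receive.
    assistant = ConversableAgent(name="assistant", llm_config=False)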