From e81355ef76aac9aa2fa8d347eeec2a97ffddb25f Mon Sep 17 00:00:00 2001
From: Nicolas Marchildon
Date: Wed, 18 Oct 2023 12:19:02 -0400
Subject: [PATCH] Enable defining new functions after agent creation

---
 autogen/agentchat/conversable_agent.py | 12 +++++++
 test/test_with_openai.py               | 45 ++++++++++++++++++++++++++
 2 files changed, 57 insertions(+)

diff --git a/autogen/agentchat/conversable_agent.py b/autogen/agentchat/conversable_agent.py
index 87845f910d17..38591a6af3a2 100644
--- a/autogen/agentchat/conversable_agent.py
+++ b/autogen/agentchat/conversable_agent.py
@@ -1017,3 +1017,15 @@ def register_function(self, function_map: Dict[str, Callable]):
             function_map: a dictionary mapping function names to functions.
         """
         self._function_map.update(function_map)
+
+    def define_function(self, signature: Dict):
+        """Define a function in the LLM configuration.
+
+        Args:
+            signature: description of the function to provide to the model. See: https://platform.openai.com/docs/api-reference/chat/create#chat/create-functions
+        """
+        if not self.llm_config:
+            error_msg = "To define a function, agent must have an llm_config"
+            logger.error(error_msg)
+            raise AssertionError(error_msg)
+        self.llm_config.setdefault("functions", []).append(signature)
diff --git a/test/test_with_openai.py b/test/test_with_openai.py
index ed63324be6af..56eb7d8b8c3d 100644
--- a/test/test_with_openai.py
+++ b/test/test_with_openai.py
@@ -57,5 +57,50 @@ def get_random_number():
     user_proxy.initiate_chat(manager, message="Let's start the game!")
 
 
+@pytest.mark.skipif(
+    skip or not sys.version.startswith("3.10"),
+    reason="do not run if openai is not installed or py!=3.10",
+)
+def test_define_function():
+    config_list_gpt4 = autogen.config_list_from_json(
+        "OAI_CONFIG_LIST",
+        filter_dict={
+            "model": ["gpt-4", "gpt-4-0314", "gpt4", "gpt-4-32k", "gpt-4-32k-0314", "gpt-4-32k-v0314"],
+        },
+    )
+    llm_config = {
+        "config_list": config_list_gpt4,
+        "seed": 42,
+        "functions": [],
+    }
+
+    user_proxy = autogen.UserProxyAgent(name="user_proxy", human_input_mode="NEVER")
+    assistant = autogen.AssistantAgent(name="test", llm_config=llm_config)
+
+    # Define a new function *after* the assistant has been created
+    assistant.define_function(
+        {
+            "name": "greet_user",
+            "description": "Greets the user.",
+            "parameters": {
+                "type": "object",
+                "properties": {},
+                "required": [],
+            },
+        }
+    )
+
+    user_proxy.initiate_chat(
+        assistant,
+        message="What functions do you know about in the context of this conversation? End your response with 'TERMINATE'.",
+    )
+    messages = assistant.chat_messages[user_proxy]
+    print(messages)
+
+    # The model should know about the function in the context of the conversation
+    assert "greet_user" in messages[1]["content"]
+
+
 if __name__ == "__main__":
     test_function_call_groupchat()
+    test_define_function()