Enable defining new functions after agent creation
elecnix committed Oct 19, 2023
1 parent 45677e3 commit e81355e
Showing 2 changed files with 57 additions and 0 deletions.
12 changes: 12 additions & 0 deletions autogen/agentchat/conversable_agent.py
@@ -1017,3 +1017,15 @@ def register_function(self, function_map: Dict[str, Callable]):
            function_map: a dictionary mapping function names to functions.
        """
        self._function_map.update(function_map)

    def define_function(self, signature: Dict):
        """Define a function in the LLM configuration.

        Args:
            signature: description of the function to provide to the model. See: https://platform.openai.com/docs/api-reference/chat/create#chat/create-functions
        """
        if not self.llm_config:
            error_msg = "To define a function, the agent must have an llm_config"
            logger.error(error_msg)
            raise AssertionError(error_msg)
        # Assumes llm_config already contains a "functions" list (e.g. llm_config = {"functions": [], ...}).
        self.llm_config["functions"].append(signature)
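
For context, here is a minimal usage sketch of the new method (not part of this commit; the agent names, the get_greeting function, and the config below are hypothetical). define_function only advertises a signature to the model, so the agent that executes the call still needs register_function to map the name to a Python callable:

import autogen

# Assumes an OAI_CONFIG_LIST file is available; the "functions" list must already exist in llm_config.
config_list = autogen.config_list_from_json("OAI_CONFIG_LIST")
llm_config = {"config_list": config_list, "seed": 42, "functions": []}

assistant = autogen.AssistantAgent(name="assistant", llm_config=llm_config)
user_proxy = autogen.UserProxyAgent(name="user_proxy", human_input_mode="NEVER")

# Advertise a new function to the model after the assistant was created.
assistant.define_function(
    {
        "name": "get_greeting",
        "description": "Returns a short greeting for the given name.",
        "parameters": {
            "type": "object",
            "properties": {"name": {"type": "string", "description": "Name to greet."}},
            "required": ["name"],
        },
    }
)

# Register the matching implementation with the agent that executes function calls.
user_proxy.register_function({"get_greeting": lambda name: f"Hello, {name}!"})

user_proxy.initiate_chat(assistant, message="Please greet Alice using the available function.")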
45 changes: 45 additions & 0 deletions test/test_with_openai.py
@@ -57,5 +57,50 @@ def get_random_number():
    user_proxy.initiate_chat(manager, message="Let's start the game!")


@pytest.mark.skipif(
    skip or not sys.version.startswith("3.10"),
    reason="do not run if openai is not installed or py!=3.10",
)
def test_define_function():
    config_list_gpt4 = autogen.config_list_from_json(
        "OAI_CONFIG_LIST",
        filter_dict={
            "model": ["gpt-4", "gpt-4-0314", "gpt4", "gpt-4-32k", "gpt-4-32k-0314", "gpt-4-32k-v0314"],
        },
    )
    llm_config = {
        "config_list": config_list_gpt4,
        "seed": 42,
        "functions": [],
    }

    user_proxy = autogen.UserProxyAgent(name="user_proxy", human_input_mode="NEVER")
    assistant = autogen.AssistantAgent(name="test", llm_config=llm_config)

    # Define a new function *after* the assistant has been created
    assistant.define_function(
        {
            "name": "greet_user",
            "description": "Greets the user.",
            "parameters": {
                "type": "object",
                "properties": {},
                "required": [],
            },
        }
    )

    user_proxy.initiate_chat(
        assistant,
        message="What functions do you know about in the context of this conversation? End your response with 'TERMINATE'.",
    )
    messages = assistant.chat_messages[user_proxy]
    print(messages)

    # The model should know about the function in the context of the conversation
    assert "greet_user" in messages[1]["content"]


if __name__ == "__main__":
    test_function_call_groupchat()
    test_define_function()
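
To run only the new test locally (a hedged note, not part of the commit): per the skipif guard above, it needs Python 3.10, the openai package, and an OAI_CONFIG_LIST entry for one of the listed GPT-4 models. One way to invoke it from Python:

import pytest

# Select the new test by node id; equivalent to `pytest test/test_with_openai.py::test_define_function` on the CLI.
pytest.main(["test/test_with_openai.py::test_define_function", "-q"])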
