From ca09533a9364e510fc70cceb775742c23d72a252 Mon Sep 17 00:00:00 2001
From: kevin666aa
Date: Thu, 28 Sep 2023 21:33:23 -0400
Subject: [PATCH 1/7] fix append_oai_message

---
 autogen/agentchat/conversable_agent.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/autogen/agentchat/conversable_agent.py b/autogen/agentchat/conversable_agent.py
index fdf42f67b6a..50d792ee8d1 100644
--- a/autogen/agentchat/conversable_agent.py
+++ b/autogen/agentchat/conversable_agent.py
@@ -275,6 +275,8 @@ def _append_oai_message(self, message: Union[Dict, str], role, conversation_id:
             return False
 
         oai_message["role"] = "function" if message.get("role") == "function" else role
+        if "function_call" in oai_message:
+            oai_message["role"] = "assistant"  # only messages with role 'assistant' can have a function call.
         self._oai_messages[conversation_id].append(oai_message)
         return True
 
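The hunk above encodes a small invariant: any message that carries a "function_call" must be recorded with role "assistant", regardless of the role the caller passed in, while function execution results keep the special "function" role. A minimal sketch of that rule on a plain dict follows; the helper name normalize_role is illustrative only and is not part of autogen.

    # Illustrative sketch of the role rule added in PATCH 1/7 (not autogen's actual method).
    def normalize_role(message: dict, role: str) -> str:
        # function execution results keep the special "function" role
        resolved = "function" if message.get("role") == "function" else role
        # only messages with role 'assistant' can have a function call
        if "function_call" in message:
            resolved = "assistant"
        return resolved

    # A function-call request relayed while speaking as "user" is still stored as "assistant".
    assert normalize_role({"function_call": {"name": "get_random_number", "arguments": "{}"}}, "user") == "assistant"
    assert normalize_role({"role": "function", "content": "42"}, "user") == "function"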
From 8067883950d8aa1f3498b0bfd6cb046437caac2a Mon Sep 17 00:00:00 2001
From: kevin666aa
Date: Mon, 2 Oct 2023 09:51:58 -0400
Subject: [PATCH 2/7] add testcase for groupchat

---
 test/agentchat/test_groupchat.py | 45 ++++++++++++++++++++++++++++++++
 1 file changed, 45 insertions(+)

diff --git a/test/agentchat/test_groupchat.py b/test/agentchat/test_groupchat.py
index 5c5d3fb8257..62546cb032e 100644
--- a/test/agentchat/test_groupchat.py
+++ b/test/agentchat/test_groupchat.py
@@ -61,7 +61,52 @@ def test_plugin():
     assert len(groupchat.messages) == 2
 
 
+def test_function_call():
+    import random
+
+    def get_random_number():
+        return random.randint(0, 100)
+
+    config_list_gpt4 = autogen.config_list_from_json(
+        "OAI_CONFIG_LIST",
+        filter_dict={
+            "model": ["gpt-4", "gpt-4-0314", "gpt4", "gpt-4-32k", "gpt-4-32k-0314", "gpt-4-32k-v0314"],
+        },
+    )
+    llm_config = {
+        "config_list": config_list_gpt4,
+        "seed": 42,
+        "functions": [
+            {
+                "name": "get_random_number",
+                "description": "Get a random number between 0 and 100",
+                "parameters": {
+                    "type": "object",
+                    "properties": {},
+                    "required": [],
+                },
+            },
+        ],
+    }
+    user_proxy = autogen.UserProxyAgent(
+        name="User_proxy",
+        system_message="A human admin that will execute code.",
+        function_map={"get_random_number": get_random_number},
+        human_input_mode="NEVER",
+    )
+    coder = autogen.AssistantAgent(
+        name="Player",
+        system_message="You will can function 'get_random_number' to get a random number. Reply 'TERMINATE' when you get at least 1 even number and 1 odd number.",
+        llm_config=llm_config,
+    )
+    groupchat = autogen.GroupChat(agents=[user_proxy, coder], messages=[], max_round=10)
+    manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)
+
+    user_proxy.initiate_chat(manager, message="Let's start the game!")
+
+
 if __name__ == "__main__":
     # test_broadcast()
     # test_chat_manager()
     test_plugin()
+    test_function_call()
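In the test above, the same function name appears twice: once in llm_config["functions"] (the JSON schema the model sees) and once in the user proxy's function_map (the Python callable that actually runs). Roughly, a proposed call is resolved as in the simplified sketch below; this mirrors the pairing used in the test, not autogen's internal execution path.

    # Simplified sketch: resolving a proposed function_call through function_map.
    import json
    import random

    def get_random_number():
        return random.randint(0, 100)

    function_map = {"get_random_number": get_random_number}

    # Shape of the function_call payload an assistant message would carry.
    proposed = {"name": "get_random_number", "arguments": "{}"}
    result = function_map[proposed["name"]](**json.loads(proposed["arguments"]))
    print(result)  # an int between 0 and 100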
From 91ccc153e02705d9e7dd7a25a1205468e002d3e9 Mon Sep 17 00:00:00 2001
From: kevin666aa
Date: Mon, 2 Oct 2023 10:15:24 -0400
Subject: [PATCH 3/7] add test_oai to openai workflow

---
 .github/workflows/openai.yml | 1 +
 test/agentchat/test_groupchat.py | 45 ------------------------
 test/test_with_openai.py | 47 ++++++++++++++++++++++++++++++++
 3 files changed, 48 insertions(+), 45 deletions(-)
 create mode 100644 test/test_with_openai.py

diff --git a/.github/workflows/openai.yml b/.github/workflows/openai.yml
index eef8e4ce6fb..1e0ecca7d2a 100644
--- a/.github/workflows/openai.yml
+++ b/.github/workflows/openai.yml
@@ -69,6 +69,7 @@ jobs:
           OAI_CONFIG_LIST: ${{ secrets.OAI_CONFIG_LIST }}
         run: |
           pip install nbconvert nbformat ipykernel
+          coverage run -a -m pytest test/test_with_openai.py
           coverage run -a -m pytest test/test_notebook.py
           coverage xml
           cat "$(pwd)/test/executed_openai_notebook_output.txt"
diff --git a/test/agentchat/test_groupchat.py b/test/agentchat/test_groupchat.py
index 62546cb032e..5c5d3fb8257 100644
--- a/test/agentchat/test_groupchat.py
+++ b/test/agentchat/test_groupchat.py
@@ -61,52 +61,7 @@ def test_plugin():
     assert len(groupchat.messages) == 2
 
 
-def test_function_call():
-    import random
-
-    def get_random_number():
-        return random.randint(0, 100)
-
-    config_list_gpt4 = autogen.config_list_from_json(
-        "OAI_CONFIG_LIST",
-        filter_dict={
-            "model": ["gpt-4", "gpt-4-0314", "gpt4", "gpt-4-32k", "gpt-4-32k-0314", "gpt-4-32k-v0314"],
-        },
-    )
-    llm_config = {
-        "config_list": config_list_gpt4,
-        "seed": 42,
-        "functions": [
-            {
-                "name": "get_random_number",
-                "description": "Get a random number between 0 and 100",
-                "parameters": {
-                    "type": "object",
-                    "properties": {},
-                    "required": [],
-                },
-            },
-        ],
-    }
-    user_proxy = autogen.UserProxyAgent(
-        name="User_proxy",
-        system_message="A human admin that will execute code.",
-        function_map={"get_random_number": get_random_number},
-        human_input_mode="NEVER",
-    )
-    coder = autogen.AssistantAgent(
-        name="Player",
-        system_message="You will can function 'get_random_number' to get a random number. Reply 'TERMINATE' when you get at least 1 even number and 1 odd number.",
-        llm_config=llm_config,
-    )
-    groupchat = autogen.GroupChat(agents=[user_proxy, coder], messages=[], max_round=10)
-    manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)
-
-    user_proxy.initiate_chat(manager, message="Let's start the game!")
-
-
 if __name__ == "__main__":
     # test_broadcast()
     # test_chat_manager()
     test_plugin()
-    test_function_call()
diff --git a/test/test_with_openai.py b/test/test_with_openai.py
new file mode 100644
index 00000000000..88bf471b9ca
--- /dev/null
+++ b/test/test_with_openai.py
@@ -0,0 +1,47 @@
+import autogen
+
+def test_function_call_groupchat():
+    import random
+
+    def get_random_number():
+        return random.randint(0, 100)
+
+    config_list_gpt4 = autogen.config_list_from_json(
+        "OAI_CONFIG_LIST",
+        filter_dict={
+            "model": ["gpt-4", "gpt-4-0314", "gpt4", "gpt-4-32k", "gpt-4-32k-0314", "gpt-4-32k-v0314"],
+        },
+    )
+    llm_config = {
+        "config_list": config_list_gpt4,
+        "seed": 42,
+        "functions": [
+            {
+                "name": "get_random_number",
+                "description": "Get a random number between 0 and 100",
+                "parameters": {
+                    "type": "object",
+                    "properties": {},
+                    "required": [],
+                },
+            },
+        ],
+    }
+    user_proxy = autogen.UserProxyAgent(
+        name="User_proxy",
+        system_message="A human admin that will execute code.",
+        function_map={"get_random_number": get_random_number},
+        human_input_mode="NEVER",
+    )
+    coder = autogen.AssistantAgent(
+        name="Player",
+        system_message="You will can function 'get_random_number' to get a random number. Reply 'TERMINATE' when you get at least 1 even number and 1 odd number.",
+        llm_config=llm_config,
+    )
+    groupchat = autogen.GroupChat(agents=[user_proxy, coder], messages=[], max_round=10)
+    manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)
+
+    user_proxy.initiate_chat(manager, message="Let's start the game!")
+
+if __name__ == "__main__":
+    test_function_call_groupchat()
\ No newline at end of file

From 98d5e9e2474d63253755e8dfb03c5f45574d60ab Mon Sep 17 00:00:00 2001
From: kevin666aa
Date: Mon, 2 Oct 2023 10:16:02 -0400
Subject: [PATCH 4/7] code formate

---
 test/test_with_openai.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/test/test_with_openai.py b/test/test_with_openai.py
index 88bf471b9ca..c2db1a54a3b 100644
--- a/test/test_with_openai.py
+++ b/test/test_with_openai.py
@@ -1,5 +1,6 @@
 import autogen
 
+
 def test_function_call_groupchat():
     import random
 
@@ -43,5 +44,6 @@ def get_random_number():
 
     user_proxy.initiate_chat(manager, message="Let's start the game!")
 
+
 if __name__ == "__main__":
-    test_function_call_groupchat()
\ No newline at end of file
+    test_function_call_groupchat()

From cf4e575025ef353abaf915ad7e708f87bab29fd1 Mon Sep 17 00:00:00 2001
From: kevin666aa
Date: Mon, 2 Oct 2023 10:39:50 -0400
Subject: [PATCH 5/7] update

---
 test/test_with_openai.py | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/test/test_with_openai.py b/test/test_with_openai.py
index c2db1a54a3b..ef8c5526427 100644
--- a/test/test_with_openai.py
+++ b/test/test_with_openai.py
@@ -1,6 +1,18 @@
 import autogen
+import pytest
+import sys
+try:
+    import openai
+
+    skip = False
+except ImportError:
+    skip = True
 
 
+@pytest.mark.skipif(
+    skip or not sys.version.startswith("3.10"),
+    reason="do not run if openai is not installed or py!=3.10",
+)
 def test_function_call_groupchat():
     import random
 
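The decorator added in PATCH 5/7 gates the test on two conditions: the openai package must import, and the interpreter must be Python 3.10 (the job that holds the OpenAI secrets in CI). The same check can be reproduced standalone, as in the sketch below; it is a local convenience, not part of the test suite.

    # Standalone check mirroring the skipif condition in test/test_with_openai.py.
    import importlib.util
    import sys

    openai_available = importlib.util.find_spec("openai") is not None
    py310 = sys.version.startswith("3.10")
    if openai_available and py310:
        print("test_function_call_groupchat would run")
    else:
        print("test_function_call_groupchat would be skipped")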
From 709789d67dc0709318d26232a652d8f1466b7cd2 Mon Sep 17 00:00:00 2001
From: kevin666aa
Date: Mon, 2 Oct 2023 10:44:50 -0400
Subject: [PATCH 6/7] formate

---
 test/test_with_openai.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/test/test_with_openai.py b/test/test_with_openai.py
index ef8c5526427..a96a3e1b497 100644
--- a/test/test_with_openai.py
+++ b/test/test_with_openai.py
@@ -1,6 +1,7 @@
 import autogen
 import pytest
 import sys
+
 try:
     import openai
 
From 7388920e314544e7d4f6a4bd88ce32b89cee650b Mon Sep 17 00:00:00 2001
From: kevin666aa
Date: Mon, 2 Oct 2023 12:24:22 -0400
Subject: [PATCH 7/7] update

---
 autogen/agentchat/conversable_agent.py | 16 ++++++++++------
 test/test_with_openai.py | 7 +++----
 2 files changed, 13 insertions(+), 10 deletions(-)

diff --git a/autogen/agentchat/conversable_agent.py b/autogen/agentchat/conversable_agent.py
index 5493815fcd6..ce3169e17f3 100644
--- a/autogen/agentchat/conversable_agent.py
+++ b/autogen/agentchat/conversable_agent.py
@@ -259,6 +259,7 @@ def _append_oai_message(self, message: Union[Dict, str], role, conversation_id:
         If the message received is a string, it will be put in the "content" field of the new dictionary.
         If the message received is a dictionary but does not have any of the two fields "content" or "function_call",
             this message is not a valid ChatCompletion message.
+        If only "function_call" is provided, "content" will be set to None if not provided, and the role of the message will be forced "assistant".
 
         Args:
             message (dict or str): message to be appended to the ChatCompletion conversation.
@@ -271,8 +272,11 @@ def _append_oai_message(self, message: Union[Dict, str], role, conversation_id:
         message = self._message_to_dict(message)
         # create oai message to be appended to the oai conversation that can be passed to oai directly.
         oai_message = {k: message[k] for k in ("content", "function_call", "name", "context") if k in message}
-        if "content" not in oai_message and "function_call" not in oai_message:
-            return False
+        if "content" not in oai_message:
+            if "function_call" in oai_message:
+                oai_message["content"] = None  # if only function_call is provided, content will be set to None.
+            else:
+                return False
 
         oai_message["role"] = "function" if message.get("role") == "function" else role
         if "function_call" in oai_message:
@@ -291,8 +295,8 @@ def send(
 
         Args:
             message (dict or str): message to be sent.
-                The message could contain the following fields (either content or function_call must be provided):
-                - content (str): the content of the message.
+                The message could contain the following fields:
+                - content (str): Required, the content of the message. (Can be None)
                 - function_call (str): the name of the function to be called.
                 - name (str): the name of the function to be called.
                 - role (str): the role of the message, any role that is not "function"
@@ -340,8 +344,8 @@ async def a_send(
 
         Args:
             message (dict or str): message to be sent.
-                The message could contain the following fields (either content or function_call must be provided):
-                - content (str): the content of the message.
+                The message could contain the following fields:
+                - content (str): Required, the content of the message. (Can be None)
                 - function_call (str): the name of the function to be called.
                 - name (str): the name of the function to be called.
                 - role (str): the role of the message, any role that is not "function"
diff --git a/test/test_with_openai.py b/test/test_with_openai.py
index a96a3e1b497..ed63324be6a 100644
--- a/test/test_with_openai.py
+++ b/test/test_with_openai.py
@@ -36,23 +36,22 @@ def get_random_number():
                 "parameters": {
                     "type": "object",
                     "properties": {},
-                    "required": [],
                 },
             },
         ],
     }
     user_proxy = autogen.UserProxyAgent(
         name="User_proxy",
-        system_message="A human admin that will execute code.",
+        system_message="A human admin that will execute function_calls.",
         function_map={"get_random_number": get_random_number},
         human_input_mode="NEVER",
     )
     coder = autogen.AssistantAgent(
         name="Player",
-        system_message="You will can function 'get_random_number' to get a random number. Reply 'TERMINATE' when you get at least 1 even number and 1 odd number.",
+        system_message="You will can function `get_random_number` to get a random number. Stop only when you get at least 1 even number and 1 odd number. Reply TERMINATE to stop.",
         llm_config=llm_config,
     )
-    groupchat = autogen.GroupChat(agents=[user_proxy, coder], messages=[], max_round=10)
+    groupchat = autogen.GroupChat(agents=[user_proxy, coder], messages=[], max_round=7)
     manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)
 
     user_proxy.initiate_chat(manager, message="Let's start the game!")
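Taken together, the final patch means a message consisting of only a "function_call" is now a valid input to _append_oai_message: the missing "content" is filled with None and the stored role is forced to "assistant". A sketch of the before/after behaviour on a plain dict follows; the expected entry is inferred from the diff above, not captured from a live run.

    # Message with no "content" key, only a function_call (as a GroupChatManager might relay it).
    message = {"function_call": {"name": "get_random_number", "arguments": "{}"}}

    # Before this PR: _append_oai_message returned False because "content" was missing.
    # After this PR: the entry appended to the conversation should look like this.
    expected = {
        "content": None,
        "function_call": {"name": "get_random_number", "arguments": "{}"},
        "role": "assistant",
    }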