
Commit 0bca1c9

Merge branch 'microsoft:main' into shebang-python
2 parents: 08560ae + d35754d

21 files changed: +3868, -696 lines

.github/workflows/openai.yml (+1)
@@ -11,6 +11,7 @@ on:
       - "test/**"
       - "notebook/agentchat_auto_feedback_from_code_execution.ipynb"
       - "notebook/agentchat_function_call.ipynb"
+      - "notebook/agentchat_groupchat_finite_state_machine.ipynb"
       - ".github/workflows/openai.yml"
 permissions: {}
   # actions: read

.pre-commit-config.yaml (+2, -1)
@@ -47,5 +47,6 @@ repos:
     rev: 1.7.1
     hooks:
       - id: nbqa-ruff
-        args: ["--fix"]
+        # Don't require notebooks to have all imports at the top
+        args: ["--fix", "--ignore=E402"]
       - id: nbqa-black
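
Ruff's E402 rule flags module-level imports that appear after other executable statements. Notebooks often do this deliberately (for example, a `%pip install` cell before the first import), which is why the hook now ignores it. A minimal sketch of the layout E402 would otherwise reject (the statements are illustrative):

```python
# Setup code runs before any import; in a real notebook this is
# typically a "%pip install ..." cell. Ruff's E402 normally flags the
# import below because it is not at the top of the module.
print("installing dependencies...")

import json  # allowed once nbqa-ruff runs with --ignore=E402

print(json.dumps({"status": "ok"}))
```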

autogen/agentchat/contrib/gpt_assistant_agent.py (+8, -5)
@@ -21,6 +21,8 @@ class GPTAssistantAgent(ConversableAgent):
     This agent is unique in its reliance on the OpenAI Assistant for state management, differing from other agents like ConversableAgent.
     """
 
+    DEFAULT_MODEL_NAME = "gpt-4-0125-preview"
+
     def __init__(
         self,
         name="GPT Assistant",
@@ -61,16 +63,17 @@ def __init__(
 
         if llm_config is False:
             raise ValueError("llm_config=False is not supported for GPTAssistantAgent.")
-
-        # Use AutoGen OpenAIWrapper to create a client
-        model_name = "gpt-4-0125-preview"
+        # Use AutoGen OpenAIWrapper to create a client
         openai_client_cfg = copy.deepcopy(llm_config)
+        # Use the class variable
+        model_name = GPTAssistantAgent.DEFAULT_MODEL_NAME
+
         # GPTAssistantAgent's azure_deployment param may cause NotFoundError (404) in client.beta.assistants.list()
         # See: https://github.com/microsoft/autogen/pull/1721
         if openai_client_cfg.get("config_list") is not None and len(openai_client_cfg["config_list"]) > 0:
-            model_name = openai_client_cfg["config_list"][0].pop("model", "gpt-4-0125-preview")
+            model_name = openai_client_cfg["config_list"][0].pop("model", GPTAssistantAgent.DEFAULT_MODEL_NAME)
         else:
-            model_name = openai_client_cfg.pop("model", "gpt-4-0125-preview")
+            model_name = openai_client_cfg.pop("model", GPTAssistantAgent.DEFAULT_MODEL_NAME)
 
         logger.warning("OpenAI client config of GPTAssistantAgent(%s) - model: %s", name, model_name)
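
The refactor replaces three copies of the hard-coded "gpt-4-0125-preview" literal with a single DEFAULT_MODEL_NAME class variable. A standalone sketch of the resolution order the code above implements (resolve_model_name is an illustrative helper, not part of the class):

```python
import copy

DEFAULT_MODEL_NAME = "gpt-4-0125-preview"  # mirrors the new class variable


def resolve_model_name(llm_config: dict) -> str:
    # Same precedence as the code above: the first config_list entry's
    # "model", then a top-level "model" key, then the class default.
    cfg = copy.deepcopy(llm_config)
    if cfg.get("config_list"):
        return cfg["config_list"][0].pop("model", DEFAULT_MODEL_NAME)
    return cfg.pop("model", DEFAULT_MODEL_NAME)


print(resolve_model_name({"config_list": [{"model": "gpt-4"}]}))  # gpt-4
print(resolve_model_name({"model": "gpt-3.5-turbo"}))  # gpt-3.5-turbo
print(resolve_model_name({}))  # gpt-4-0125-preview
```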

autogen/agentchat/conversable_agent.py (+82, -4)
@@ -6,6 +6,7 @@
 import logging
 import re
 from collections import defaultdict
+from functools import partial
 from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Type, TypeVar, Union
 import warnings
 from openai import BadRequestError
@@ -325,6 +326,80 @@ def reply_func(
         if ignore_async_in_sync_chat and inspect.iscoroutinefunction(reply_func):
             self._ignore_async_func_in_sync_chat_list.append(reply_func)
 
+    @staticmethod
+    def _summary_from_nested_chats(
+        chat_queue: List[Dict[str, Any]], recipient: Agent, messages: List[Dict[str, Any]], sender: Agent, config: Any
+    ) -> Tuple[bool, str]:
+        """A simple chat reply function.
+        This function initiates one chat or a sequence of chats between the "recipient" and the agents in the
+        chat_queue.
+
+        It extracts and returns a summary from the nested chat based on the "summary_method" in each chat in chat_queue.
+
+        Returns:
+            Tuple[bool, str]: A tuple where the first element indicates the completion of the chat, and the second element contains the summary of the last chat if any chats were initiated.
+        """
+        last_msg = messages[-1].get("content")
+        chat_to_run = []
+        for i, c in enumerate(chat_queue):
+            current_c = c.copy()
+            message = current_c.get("message")
+            # If message is not provided in chat_queue, we by default use the last message from the original chat history as the first message in this nested chat (for the first chat in the chat queue).
+            # NOTE: This setting is prone to change.
+            if message is None and i == 0:
+                message = last_msg
+            if callable(message):
+                message = message(recipient, messages, sender, config)
+            # We only run a chat that has a valid message. NOTE: This is prone to change depending on the application.
+            if message:
+                current_c["message"] = message
+                chat_to_run.append(current_c)
+        if not chat_to_run:
+            return True, None
+        res = recipient.initiate_chats(chat_to_run)
+        return True, res[-1].summary
+
+    def register_nested_chats(
+        self,
+        chat_queue: List[Dict[str, Any]],
+        trigger: Union[Type[Agent], str, Agent, Callable[[Agent], bool], List] = [Agent, None],
+        reply_func_from_nested_chats: Union[str, Callable] = "summary_from_nested_chats",
+        position: int = 2,
+        **kwargs,
+    ) -> None:
+        """Register a nested chat reply function.
+        Args:
+            chat_queue (list): a list of chat objects to be initiated.
+            trigger (Agent class, str, Agent instance, callable, or list): Defaults to [Agent, None]. Refer to `register_reply` for details.
+            reply_func_from_nested_chats (Callable, str): the reply function for the nested chat.
+                The function takes a chat_queue for the nested chat, a recipient agent, a list of messages, a sender agent, and a config as input and returns a reply message.
+                Defaults to "summary_from_nested_chats", which corresponds to a built-in reply function that gets the summary from the nested chat_queue.
+                ```python
+                def reply_func_from_nested_chats(
+                    chat_queue: List[Dict],
+                    recipient: ConversableAgent,
+                    messages: Optional[List[Dict]] = None,
+                    sender: Optional[Agent] = None,
+                    config: Optional[Any] = None,
+                ) -> Tuple[bool, Union[str, Dict, None]]:
+                ```
+            position (int): Refer to `register_reply` for details. Defaults to 2, which means the termination and human replies are checked first, then the registered nested chat reply.
+            kwargs: Refer to `register_reply` for details.
+        """
+        if reply_func_from_nested_chats == "summary_from_nested_chats":
+            reply_func_from_nested_chats = self._summary_from_nested_chats
+        if not callable(reply_func_from_nested_chats):
+            raise ValueError("reply_func_from_nested_chats must be a callable")
+        reply_func = partial(reply_func_from_nested_chats, chat_queue)
+        self.register_reply(
+            trigger,
+            reply_func,
+            position,
+            kwargs.get("config"),
+            kwargs.get("reset_config"),
+            ignore_async_in_sync_chat=kwargs.get("ignore_async_in_sync_chat"),
+        )
+
     @property
     def system_message(self) -> str:
         """Return the system message."""
@@ -477,7 +552,7 @@ def _process_message_before_send(
         """Process the message before sending it to the recipient."""
         hook_list = self.hook_lists["process_message_before_send"]
         for hook in hook_list:
-            message = hook(message, recipient, silent)
+            message = hook(sender=self, message=message, recipient=recipient, silent=silent)
         return message
 
     def send(
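
Because the hook is now invoked with keyword arguments and also receives the sending agent, custom `process_message_before_send` hooks must accept `sender`, `message`, `recipient`, and `silent`. A minimal sketch of a conforming hook (the tagging logic is illustrative, and `agent` is assumed to be an existing ConversableAgent):

```python
def prefix_with_sender(sender, message, recipient, silent):
    # Tag outgoing plain-string messages with the sending agent's name;
    # dict messages (e.g. tool calls) pass through unchanged.
    if isinstance(message, str):
        return f"[from {sender.name}] {message}"
    return message


# Register under the same string key the diff indexes into hook_lists:
agent.register_hook("process_message_before_send", prefix_with_sender)
```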
@@ -2054,15 +2129,18 @@ async def a_generate_init_message(self, **context) -> Union[str, Dict]:
         self._process_carryover(context)
         return context["message"]
 
-    def register_function(self, function_map: Dict[str, Callable]):
+    def register_function(self, function_map: Dict[str, Union[Callable, None]]):
         """Register functions to the agent.
 
         Args:
-            function_map: a dictionary mapping function names to functions.
+            function_map: a dictionary mapping function names to functions. If function_map[name] is None, the function will be removed from the function_map.
         """
-        for name in function_map.keys():
+        for name, func in function_map.items():
             self._assert_valid_name(name)
+            if func is None and name not in self._function_map.keys():
+                warnings.warn(f"The function {name} to remove doesn't exist")
         self._function_map.update(function_map)
+        self._function_map = {k: v for k, v in self._function_map.items() if v is not None}
 
     def update_function_signature(self, func_sig: Union[str, Dict], is_remove: None):
         """update a function_signature in the LLM configuration for function_call.

notebook/agentchat_custom_model.ipynb (+1, -1)
@@ -5,7 +5,7 @@
   "cell_type": "markdown",
   "metadata": {},
   "source": [
-    "<a href=\"https://colab.research.google.com/github/microsoft/autogen/blob/main/notebook/agentchat_two_users.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
+    "<a href=\"https://colab.research.google.com/github/microsoft/autogen/blob/main/notebook/agentchat_custom_model.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
   ]
 },
 {

notebook/agentchat_graph_modelling_language_using_select_speaker.ipynb (-668)

This file was deleted.

notebook/agentchat_groupchat_finite_state_machine.ipynb (+620)

Large diffs are not rendered by default.
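
The new notebook's content is not rendered here, but the feature it demonstrates, finite-state-machine speaker transitions in a group chat, looks roughly like the sketch below. Treat the parameter names as an assumption based on autogen 0.2's GroupChat API (`allowed_or_disallowed_speaker_transitions`, `speaker_transitions_type`), with `llm_config` assumed to be a valid model configuration:

```python
from autogen import ConversableAgent, GroupChat, GroupChatManager

# Assumption: llm_config is a valid AutoGen model configuration dict.
a = ConversableAgent("A", llm_config=llm_config)
b = ConversableAgent("B", llm_config=llm_config)
c = ConversableAgent("C", llm_config=llm_config)

# The FSM: the speaker may only move A -> B -> C -> A.
groupchat = GroupChat(
    agents=[a, b, c],
    messages=[],
    allowed_or_disallowed_speaker_transitions={a: [b], b: [c], c: [a]},
    speaker_transitions_type="allowed",
    max_round=6,
)
manager = GroupChatManager(groupchat=groupchat, llm_config=llm_config)
a.initiate_chat(manager, message="Start the round-robin discussion.")
```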
