Skip to content

Commit 021600a

Browse files
qingyun-wu and sonichi authored
Allow limiting the maximum number of turns in initiate_chat and initiate_chats (microsoft#1703)
* max_turns * add notebook * simplify code * groupchat _prepare_chat * doc * add doc * Update autogen/agentchat/conversable_agent.py --------- Co-authored-by: Chi Wang <[email protected]>
1 parent aa806d6 commit 021600a

10 files changed

+1106
-464
lines changed

autogen/agentchat/chat.py

+30-24
Original file line numberDiff line numberDiff line change
@@ -34,31 +34,37 @@ def initiate_chats(chat_queue: List[Dict[str, Any]]) -> List[ChatResult]:
3434
3535
args:
3636
chat_queue (List[Dict]): a list of dictionaries containing the information of the chats.
37-
Each dictionary should contain the following fields:
37+
Each dictionary should contain the input arguments for `ConversableAgent.initiate_chat`.
38+
More specifically, each dictionary could include the following fields:
39+
recipient: the recipient agent.
40+
- "sender": the sender agent.
3841
- "recipient": the recipient agent.
39-
- "context": any context information, e.g., the request message. The following fields are reserved:
40-
"message" needs to be provided if the `generate_init_message` method is not overridden.
41-
Otherwise, input() will be called to get the initial message.
42-
"summary_method": a string or callable specifying the method to get a summary from the chat. Default is DEFAULT_summary_method, i.e., "last_msg".
43-
- Supported string are "last_msg" and "reflection_with_llm":
44-
when set "last_msg", it returns the last message of the dialog as the summary.
45-
when set "reflection_with_llm", it returns a summary extracted using an llm client.
46-
`llm_config` must be set in either the recipient or sender.
47-
"reflection_with_llm" requires the llm_config to be set in either the sender or the recipient.
48-
- A callable summary_method should take the recipient and sender agent in a chat as input and return a string of summary. E.g,
49-
```python
50-
def my_summary_method(
51-
sender: ConversableAgent,
52-
recipient: ConversableAgent,
53-
):
54-
return recipient.last_message(sender)["content"]
55-
```
56-
"summary_prompt" can be used to specify the prompt used to extract a summary when summary_method is "reflection_with_llm".
57-
Default is None and the following default prompt will be used when "summary_method" is set to "reflection_with_llm":
58-
"Identify and extract the final solution to the originally asked question based on the conversation."
59-
"carryover" can be used to specify the carryover information to be passed to this chat.
60-
If provided, we will combine this carryover with the "message" content when generating the initial chat
61-
message in `generate_init_message`.
42+
- clear_history (bool): whether to clear the chat history with the agent. Default is True.
43+
- silent (bool or None): (Experimental) whether to print the messages for this conversation. Default is False.
44+
- cache (Cache or None): the cache client to be used for this conversation. Default is None.
45+
- max_turns (int or None): the maximum number of turns for the chat. If None, the chat will continue until a termination condition is met. Default is None.
46+
- "message" needs to be provided if the `generate_init_message` method is not overridden.
47+
Otherwise, input() will be called to get the initial message.
48+
- "summary_method": a string or callable specifying the method to get a summary from the chat. Default is DEFAULT_summary_method, i.e., "last_msg".
49+
- Supported string are "last_msg" and "reflection_with_llm":
50+
when set "last_msg", it returns the last message of the dialog as the summary.
51+
when set "reflection_with_llm", it returns a summary extracted using an llm client.
52+
`llm_config` must be set in either the recipient or sender.
53+
"reflection_with_llm" requires the llm_config to be set in either the sender or the recipient.
54+
- A callable summary_method should take the recipient and sender agent in a chat as input and return a string of summary. E.g,
55+
```python
56+
def my_summary_method(
57+
sender: ConversableAgent,
58+
recipient: ConversableAgent,
59+
):
60+
return recipient.last_message(sender)["content"]
61+
```
62+
"summary_prompt" can be used to specify the prompt used to extract a summary when summary_method is "reflection_with_llm".
63+
Default is None and the following default prompt will be used when "summary_method" is set to "reflection_with_llm":
64+
"Identify and extract the final solution to the originally asked question based on the conversation."
65+
"carryover" can be used to specify the carryover information to be passed to this chat.
66+
If provided, we will combine this carryover with the "message" content when generating the initial chat
67+
message in `generate_init_message`.
6268
6369
6470
returns:

autogen/agentchat/conversable_agent.py

+44-49
Original file line numberDiff line numberDiff line change
@@ -519,13 +519,6 @@ def send(
519519
"Message can't be converted into a valid ChatCompletion message. Either content or function_call must be provided."
520520
)
521521

522-
chat_result = ChatResult(
523-
chat_history=self.chat_messages[recipient],
524-
cost=gather_usage_summary([self, recipient]),
525-
human_input=self._human_input,
526-
)
527-
return chat_result
528-
529522
async def a_send(
530523
self,
531524
message: Union[Dict, str],
@@ -578,13 +571,6 @@ async def a_send(
578571
"Message can't be converted into a valid ChatCompletion message. Either content or function_call must be provided."
579572
)
580573

581-
chat_result = ChatResult(
582-
chat_history=self.chat_messages[recipient],
583-
cost=gather_usage_summary([self, recipient]),
584-
human_input=self._human_input,
585-
)
586-
return chat_result
587-
588574
def _print_received_message(self, message: Union[Dict, str], sender: Agent):
589575
# print the message received
590576
print(colored(sender.name, "yellow"), "(to", f"{self.name}):\n", flush=True)
@@ -729,14 +715,20 @@ async def a_receive(
729715
if reply is not None:
730716
await self.a_send(reply, sender, silent=silent)
731717

732-
def _prepare_chat(self, recipient: "ConversableAgent", clear_history: bool, prepare_recipient: bool = True) -> None:
718+
def _prepare_chat(
719+
self,
720+
recipient: "ConversableAgent",
721+
clear_history: bool,
722+
prepare_recipient: bool = True,
723+
reply_at_receive: bool = True,
724+
) -> None:
733725
self.reset_consecutive_auto_reply_counter(recipient)
734-
self.reply_at_receive[recipient] = True
726+
self.reply_at_receive[recipient] = reply_at_receive
735727
if clear_history:
736728
self.clear_history(recipient)
737729
self._human_input = []
738730
if prepare_recipient:
739-
recipient._prepare_chat(self, clear_history, False)
731+
recipient._prepare_chat(self, clear_history, False, reply_at_receive)
740732

741733
def _raise_exception_on_async_reply_functions(self) -> None:
742734
"""Raise an exception if any async reply functions are registered.
@@ -763,6 +755,7 @@ def initiate_chat(
763755
clear_history: Optional[bool] = True,
764756
silent: Optional[bool] = False,
765757
cache: Optional[Cache] = None,
758+
max_turns: Optional[int] = None,
766759
**context,
767760
) -> ChatResult:
768761
"""Initiate a chat with the recipient agent.
@@ -773,9 +766,12 @@ def initiate_chat(
773766
774767
Args:
775768
recipient: the recipient agent.
776-
clear_history (bool): whether to clear the chat history with the agent.
777-
silent (bool or None): (Experimental) whether to print the messages for this conversation.
778-
cache (Cache or None): the cache client to be used for this conversation.
769+
clear_history (bool): whether to clear the chat history with the agent. Default is True.
770+
silent (bool or None): (Experimental) whether to print the messages for this conversation. Default is False.
771+
cache (Cache or None): the cache client to be used for this conversation. Default is None.
772+
max_turns (int or None): the maximum number of turns for the chat between the two agents. One turn means one conversation round trip. Note that this is different from
773+
[max_consecutive_auto_reply](#max_consecutive_auto_reply) which is the maximum number of consecutive auto replies; and it is also different from [max_rounds in GroupChat](./groupchat#groupchat-objects) which is the maximum number of rounds in a group chat session.
774+
If max_turns is set to None, the chat will continue until a termination condition is met. Default is None.
779775
**context: any context information. It has the following reserved fields:
780776
"message": a str of message. Needs to be provided. Otherwise, input() will be called to get the initial message.
781777
"summary_method": a string or callable specifying the method to get a summary from the chat. Default is DEFAULT_summary_method, i.e., "last_msg".
@@ -812,8 +808,19 @@ def my_summary_method(
812808
agent._raise_exception_on_async_reply_functions()
813809
agent.previous_cache = agent.client_cache
814810
agent.client_cache = cache
815-
self._prepare_chat(recipient, clear_history)
816-
self.send(self.generate_init_message(**context), recipient, silent=silent)
811+
if isinstance(max_turns, int):
812+
self._prepare_chat(recipient, clear_history, reply_at_receive=False)
813+
for _ in range(max_turns):
814+
if _ == 0:
815+
msg2send = self.generate_init_message(**context)
816+
else:
817+
msg2send = self.generate_reply(messages=self.chat_messages[recipient], sender=recipient)
818+
if msg2send is None:
819+
break
820+
self.send(msg2send, recipient, request_reply=True, silent=silent)
821+
else:
822+
self._prepare_chat(recipient, clear_history)
823+
self.send(self.generate_init_message(**context), recipient, silent=silent)
817824
summary = self._summarize_chat(
818825
context.get("summary_method", ConversableAgent.DEFAULT_summary_method),
819826
recipient,
@@ -837,6 +844,7 @@ async def a_initiate_chat(
837844
clear_history: Optional[bool] = True,
838845
silent: Optional[bool] = False,
839846
cache: Optional[Cache] = None,
847+
max_turns: Optional[int] = None,
840848
**context,
841849
) -> ChatResult:
842850
"""(async) Initiate a chat with the recipient agent.
@@ -853,11 +861,22 @@ async def a_initiate_chat(
853861
_chat_info = context.copy()
854862
_chat_info["recipient"] = recipient
855863
consolidate_chat_info(_chat_info, uniform_sender=self)
856-
self._prepare_chat(recipient, clear_history)
857864
for agent in [self, recipient]:
858865
agent.previous_cache = agent.client_cache
859866
agent.client_cache = cache
860-
await self.a_send(await self.a_generate_init_message(**context), recipient, silent=silent)
867+
if isinstance(max_turns, int):
868+
self._prepare_chat(recipient, clear_history, reply_at_receive=False)
869+
for _ in range(max_turns):
870+
if _ == 0:
871+
msg2send = await self.a_generate_init_message(**context)
872+
else:
873+
msg2send = await self.a_generate_reply(messages=self.chat_messages[recipient], sender=recipient)
874+
if msg2send is None:
875+
break
876+
await self.a_send(msg2send, recipient, request_reply=True, silent=silent)
877+
else:
878+
self._prepare_chat(recipient, clear_history)
879+
await self.a_send(await self.a_generate_init_message(**context), recipient, silent=silent)
861880
summary = self._summarize_chat(
862881
context.get("summary_method", ConversableAgent.DEFAULT_summary_method),
863882
recipient,
@@ -956,31 +975,7 @@ def initiate_chats(self, chat_queue: List[Dict[str, Any]]) -> List[ChatResult]:
956975
957976
Args:
958977
chat_queue (List[Dict]): a list of dictionaries containing the information of the chats.
959-
Each dictionary should contain the following fields:
960-
- "recipient": the recipient agent.
961-
- "context": any context information, e.g., the request message. The following fields are reserved:
962-
"message" needs to be provided if the `generate_init_message` method is not overridden.
963-
Otherwise, input() will be called to get the initial message.
964-
"summary_method": a string or callable specifying the method to get a summary from the chat. Default is DEFAULT_summary_method, i.e., "last_msg".
965-
- Supported string are "last_msg" and "reflection_with_llm":
966-
when set "last_msg", it returns the last message of the dialog as the summary.
967-
when set "reflection_with_llm", it returns a summary extracted using an llm client.
968-
`llm_config` must be set in either the recipient or sender.
969-
"reflection_with_llm" requires the llm_config to be set in either the sender or the recipient.
970-
- A callable summary_method should take the recipient and sender agent in a chat as input and return a string of summary. E.g,
971-
```python
972-
def my_summary_method(
973-
sender: ConversableAgent,
974-
recipient: ConversableAgent,
975-
):
976-
return recipient.last_message(sender)["content"]
977-
```
978-
"summary_prompt" can be used to specify the prompt used to extract a summary when summary_method is "reflection_with_llm".
979-
Default is None and the following default prompt will be used when "summary_method" is set to "reflection_with_llm":
980-
"Identify and extract the final solution to the originally asked question based on the conversation."
981-
"carryover" can be used to specify the carryover information to be passed to this chat.
982-
If provided, we will combine this carryover with the "message" content when generating the initial chat
983-
message in `generate_init_message`.
978+
Each dictionary should contain the input arguments for [`initiate_chat`](conversable_agent#initiate_chat)
984979
985980
Returns: a list of ChatResult objects corresponding to the finished chats in the chat_queue.
986981
"""

autogen/agentchat/groupchat.py

+9-3
Original file line numberDiff line numberDiff line change
@@ -507,15 +507,21 @@ def chat_messages_for_summary(self, agent: Agent) -> List[Dict]:
507507
"""
508508
return self._groupchat.messages
509509

510-
def _prepare_chat(self, recipient: ConversableAgent, clear_history: bool, prepare_recipient: bool = True) -> None:
511-
super()._prepare_chat(recipient, clear_history, prepare_recipient)
510+
def _prepare_chat(
511+
self,
512+
recipient: ConversableAgent,
513+
clear_history: bool,
514+
prepare_recipient: bool = True,
515+
reply_at_receive: bool = True,
516+
) -> None:
517+
super()._prepare_chat(recipient, clear_history, prepare_recipient, reply_at_receive)
512518

513519
if clear_history:
514520
self._groupchat.reset()
515521

516522
for agent in self._groupchat.agents:
517523
if (recipient != agent or prepare_recipient) and isinstance(agent, ConversableAgent):
518-
agent._prepare_chat(self, clear_history, False)
524+
agent._prepare_chat(self, clear_history, False, reply_at_receive)
519525

520526
def run_chat(
521527
self,

0 commit comments

Comments (0)