Skip to content

Commit d340159

Browse files
Fix some type annotations and edge cases (#572)
* Fix some type annotations in agents

  This fixes some errors in type annotations of `ConversableAgent`, `UserProxyAgent`, `GroupChat` and `AssistantAgent` by adjusting the type signature according to the actual implementation. There should be no change in code behavior.

* Fix agent types in `GroupChat`

  Some `Agent`s are actually required to be `ConversableAgent` because they are used as one.

* Convert str message to dict before printing message

* Revert back to Agent for GroupChat

* GroupChat revert update

---------

Co-authored-by: Beibin Li <[email protected]>
Co-authored-by: Beibin Li <[email protected]>
1 parent fe00925 commit d340159

File tree

4 files changed

+26
-19
lines changed

4 files changed

+26
-19
lines changed

autogen/agentchat/assistant_agent.py

+3-3
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,5 @@
11
from .conversable_agent import ConversableAgent
2-
from typing import Callable, Dict, Optional, Union
2+
from typing import Callable, Dict, Literal, Optional, Union
33

44

55
class AssistantAgent(ConversableAgent):
@@ -30,11 +30,11 @@ def __init__(
3030
self,
3131
name: str,
3232
system_message: Optional[str] = DEFAULT_SYSTEM_MESSAGE,
33-
llm_config: Optional[Union[Dict, bool]] = None,
33+
llm_config: Optional[Union[Dict, Literal[False]]] = None,
3434
is_termination_msg: Optional[Callable[[Dict], bool]] = None,
3535
max_consecutive_auto_reply: Optional[int] = None,
3636
human_input_mode: Optional[str] = "NEVER",
37-
code_execution_config: Optional[Union[Dict, bool]] = False,
37+
code_execution_config: Optional[Union[Dict, Literal[False]]] = False,
3838
**kwargs,
3939
):
4040
"""

autogen/agentchat/conversable_agent.py

+17-11
Original file line number | Diff line number | Diff line change
@@ -3,7 +3,7 @@
33
import copy
44
import json
55
import logging
6-
from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union
6+
from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Type, Union
77
from autogen import OpenAIWrapper
88
from .agent import Agent
99
from autogen.code_utils import (
@@ -45,6 +45,8 @@ class ConversableAgent(Agent):
4545
}
4646
MAX_CONSECUTIVE_AUTO_REPLY = 100 # maximum number of consecutive auto replies (subject to future change)
4747

48+
llm_config: Union[Dict, Literal[False]]
49+
4850
def __init__(
4951
self,
5052
name: str,
@@ -53,8 +55,8 @@ def __init__(
5355
max_consecutive_auto_reply: Optional[int] = None,
5456
human_input_mode: Optional[str] = "TERMINATE",
5557
function_map: Optional[Dict[str, Callable]] = None,
56-
code_execution_config: Optional[Union[Dict, bool]] = None,
57-
llm_config: Optional[Union[Dict, bool]] = None,
58+
code_execution_config: Optional[Union[Dict, Literal[False]]] = None,
59+
llm_config: Optional[Union[Dict, Literal[False]]] = None,
5860
default_auto_reply: Optional[Union[str, Dict, None]] = "",
5961
):
6062
"""
@@ -114,7 +116,9 @@ def __init__(
114116
self.llm_config.update(llm_config)
115117
self.client = OpenAIWrapper(**self.llm_config)
116118

117-
self._code_execution_config = {} if code_execution_config is None else code_execution_config
119+
self._code_execution_config: Union[Dict, Literal[False]] = (
120+
{} if code_execution_config is None else code_execution_config
121+
)
118122
self.human_input_mode = human_input_mode
119123
self._max_consecutive_auto_reply = (
120124
max_consecutive_auto_reply if max_consecutive_auto_reply is not None else self.MAX_CONSECUTIVE_AUTO_REPLY
@@ -135,7 +139,7 @@ def register_reply(
135139
self,
136140
trigger: Union[Type[Agent], str, Agent, Callable[[Agent], bool], List],
137141
reply_func: Callable,
138-
position: Optional[int] = 0,
142+
position: int = 0,
139143
config: Optional[Any] = None,
140144
reset_config: Optional[Callable] = None,
141145
):
@@ -162,7 +166,7 @@ def reply_func(
162166
messages: Optional[List[Dict]] = None,
163167
sender: Optional[Agent] = None,
164168
config: Optional[Any] = None,
165-
) -> Union[str, Dict, None]:
169+
) -> Tuple[bool, Union[str, Dict, None]]:
166170
```
167171
position (int): the position of the reply function in the reply function list.
168172
The function registered later will be checked earlier by default.
@@ -221,7 +225,7 @@ def chat_messages(self) -> Dict[Agent, List[Dict]]:
221225
"""A dictionary of conversations from agent to list of messages."""
222226
return self._oai_messages
223227

224-
def last_message(self, agent: Optional[Agent] = None) -> Dict:
228+
def last_message(self, agent: Optional[Agent] = None) -> Optional[Dict]:
225229
"""The last message exchanged with the agent.
226230
227231
Args:
@@ -304,7 +308,7 @@ def send(
304308
recipient: Agent,
305309
request_reply: Optional[bool] = None,
306310
silent: Optional[bool] = False,
307-
) -> bool:
311+
):
308312
"""Send a message to another agent.
309313
310314
Args:
@@ -353,7 +357,7 @@ async def a_send(
353357
recipient: Agent,
354358
request_reply: Optional[bool] = None,
355359
silent: Optional[bool] = False,
356-
) -> bool:
360+
):
357361
"""(async) Send a message to another agent.
358362
359363
Args:
@@ -399,6 +403,8 @@ async def a_send(
399403
def _print_received_message(self, message: Union[Dict, str], sender: Agent):
400404
# print the message received
401405
print(colored(sender.name, "yellow"), "(to", f"{self.name}):\n", flush=True)
406+
message = self._message_to_dict(message)
407+
402408
if message.get("role") == "function":
403409
func_print = f"***** Response from calling function \"{message['name']}\" *****"
404410
print(colored(func_print, "green"), flush=True)
@@ -606,7 +612,7 @@ def generate_oai_reply(
606612
self,
607613
messages: Optional[List[Dict]] = None,
608614
sender: Optional[Agent] = None,
609-
config: Optional[Any] = None,
615+
config: Optional[OpenAIWrapper] = None,
610616
) -> Tuple[bool, Union[str, Dict, None]]:
611617
"""Generate a reply using autogen.oai."""
612618
client = self.client if config is None else config
@@ -625,7 +631,7 @@ def generate_code_execution_reply(
625631
self,
626632
messages: Optional[List[Dict]] = None,
627633
sender: Optional[Agent] = None,
628-
config: Optional[Any] = None,
634+
config: Optional[Union[Dict, Literal[False]]] = None,
629635
):
630636
"""Generate a reply using code execution."""
631637
code_execution_config = config if config is not None else self._code_execution_config

autogen/agentchat/groupchat.py

+3-2
Original file line number | Diff line number | Diff line change
@@ -1,9 +1,10 @@
1-
from dataclasses import dataclass
1+
import logging
22
import sys
3+
from dataclasses import dataclass
34
from typing import Dict, List, Optional, Union
5+
46
from .agent import Agent
57
from .conversable_agent import ConversableAgent
6-
import logging
78

89
logger = logging.getLogger(__name__)
910

autogen/agentchat/user_proxy_agent.py

+3-3
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,5 @@
11
from .conversable_agent import ConversableAgent
2-
from typing import Callable, Dict, Optional, Union
2+
from typing import Callable, Dict, Literal, Optional, Union
33

44

55
class UserProxyAgent(ConversableAgent):
@@ -22,9 +22,9 @@ def __init__(
2222
max_consecutive_auto_reply: Optional[int] = None,
2323
human_input_mode: Optional[str] = "ALWAYS",
2424
function_map: Optional[Dict[str, Callable]] = None,
25-
code_execution_config: Optional[Union[Dict, bool]] = None,
25+
code_execution_config: Optional[Union[Dict, Literal[False]]] = None,
2626
default_auto_reply: Optional[Union[str, Dict, None]] = "",
27-
llm_config: Optional[Union[Dict, bool]] = False,
27+
llm_config: Optional[Union[Dict, Literal[False]]] = False,
2828
system_message: Optional[str] = "",
2929
):
3030
"""

Comments (0)