 import copy
 import json
 import logging
-from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union
+from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Type, Union
 from autogen import OpenAIWrapper
 from .agent import Agent
 from autogen.code_utils import (
@@ -45,6 +45,8 @@ class ConversableAgent(Agent):
     }
     MAX_CONSECUTIVE_AUTO_REPLY = 100  # maximum number of consecutive auto replies (subject to future change)

+    llm_config: Union[Dict, Literal[False]]
+
     def __init__(
         self,
         name: str,
@@ -53,8 +55,8 @@ def __init__(
         max_consecutive_auto_reply: Optional[int] = None,
         human_input_mode: Optional[str] = "TERMINATE",
         function_map: Optional[Dict[str, Callable]] = None,
-        code_execution_config: Optional[Union[Dict, bool]] = None,
-        llm_config: Optional[Union[Dict, bool]] = None,
+        code_execution_config: Optional[Union[Dict, Literal[False]]] = None,
+        llm_config: Optional[Union[Dict, Literal[False]]] = None,
         default_auto_reply: Optional[Union[str, Dict, None]] = "",
     ):
         """
@@ -114,7 +116,9 @@ def __init__(
                 self.llm_config.update(llm_config)
             self.client = OpenAIWrapper(**self.llm_config)

-        self._code_execution_config = {} if code_execution_config is None else code_execution_config
+        self._code_execution_config: Union[Dict, Literal[False]] = (
+            {} if code_execution_config is None else code_execution_config
+        )
         self.human_input_mode = human_input_mode
         self._max_consecutive_auto_reply = (
             max_consecutive_auto_reply if max_consecutive_auto_reply is not None else self.MAX_CONSECUTIVE_AUTO_REPLY
@@ -135,7 +139,7 @@ def register_reply(
         self,
         trigger: Union[Type[Agent], str, Agent, Callable[[Agent], bool], List],
         reply_func: Callable,
-        position: Optional[int] = 0,
+        position: int = 0,
         config: Optional[Any] = None,
         reset_config: Optional[Callable] = None,
     ):
@@ -162,7 +166,7 @@ def reply_func(
             messages: Optional[List[Dict]] = None,
             sender: Optional[Agent] = None,
             config: Optional[Any] = None,
-        ) -> Union[str, Dict, None]:
+        ) -> Tuple[bool, Union[str, Dict, None]]:
         ```
             position (int): the position of the reply function in the reply function list.
                 The function registered later will be checked earlier by default.
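Note on this hunk: the docstring signature now matches what `register_reply` actually expects — the registered function returns a `(final, reply)` tuple, where the boolean marks whether the reply is final or whether later reply functions should still be consulted. A minimal sketch under that contract (the `count_reply` function and the commented-out registration are hypothetical):

```python
from typing import Any, Dict, List, Optional, Tuple, Union

def count_reply(
    recipient,                              # the agent this function is registered on
    messages: Optional[List[Dict]] = None,
    sender=None,
    config: Optional[Any] = None,
) -> Tuple[bool, Union[str, Dict, None]]:
    if messages:
        # True: this reply is final, no later reply function is consulted
        return True, f"Received {len(messages)} messages so far."
    # False: fall through to the next registered reply function
    return False, None

# Hypothetical registration on an existing ConversableAgent instance `agent`:
# agent.register_reply(trigger=Agent, reply_func=count_reply, position=0)
```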
@@ -221,7 +225,7 @@ def chat_messages(self) -> Dict[Agent, List[Dict]]:
         """A dictionary of conversations from agent to list of messages."""
         return self._oai_messages

-    def last_message(self, agent: Optional[Agent] = None) -> Dict:
+    def last_message(self, agent: Optional[Agent] = None) -> Optional[Dict]:
         """The last message exchanged with the agent.

         Args:
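Note on this hunk: since `last_message` can legitimately return `None` (for example before any message has been exchanged), the return annotation is widened to `Optional[Dict]`, and callers should guard the access. A small hedged sketch, with `agent_a` and `agent_b` standing in for ConversableAgent instances:

```python
from typing import Dict, Optional

def describe_last_message(agent_a, agent_b) -> str:
    """Guarded access now that last_message may return None."""
    last: Optional[Dict] = agent_a.last_message(agent_b)
    if last is None:
        return "no messages exchanged yet"
    return f"last content: {last.get('content')}"
```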
@@ -304,7 +308,7 @@ def send(
         recipient: Agent,
         request_reply: Optional[bool] = None,
         silent: Optional[bool] = False,
-    ) -> bool:
+    ):
         """Send a message to another agent.

         Args:
@@ -353,7 +357,7 @@ async def a_send(
         recipient: Agent,
         request_reply: Optional[bool] = None,
         silent: Optional[bool] = False,
-    ) -> bool:
+    ):
         """(async) Send a message to another agent.

         Args:
@@ -399,6 +403,8 @@ async def a_send(
     def _print_received_message(self, message: Union[Dict, str], sender: Agent):
         # print the message received
         print(colored(sender.name, "yellow"), "(to", f"{self.name}):\n", flush=True)
+        message = self._message_to_dict(message)
+
         if message.get("role") == "function":
             func_print = f"***** Response from calling function \"{message['name']}\" *****"
             print(colored(func_print, "green"), flush=True)
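Note on this hunk: normalizing the incoming message with `self._message_to_dict(message)` before the `message.get("role")` check means a plain-string message no longer reaches the dict-only code path. A rough, hedged sketch of what that existing helper presumably does (simplified; the real implementation lives on ConversableAgent):

```python
from typing import Dict, Union

def _message_to_dict_sketch(message: Union[Dict, str]) -> Dict:
    # A bare string is wrapped into an OpenAI-style content-only dict, so the
    # subsequent message.get("role") check never runs on a str.
    if isinstance(message, str):
        return {"content": message}
    return message
```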
@@ -606,7 +612,7 @@ def generate_oai_reply(
         self,
         messages: Optional[List[Dict]] = None,
         sender: Optional[Agent] = None,
-        config: Optional[Any] = None,
+        config: Optional[OpenAIWrapper] = None,
     ) -> Tuple[bool, Union[str, Dict, None]]:
         """Generate a reply using autogen.oai."""
         client = self.client if config is None else config
@@ -625,7 +631,7 @@ def generate_code_execution_reply(
         self,
         messages: Optional[List[Dict]] = None,
         sender: Optional[Agent] = None,
-        config: Optional[Any] = None,
+        config: Optional[Union[Dict, Literal[False]]] = None,
     ):
         """Generate a reply using code execution."""
         code_execution_config = config if config is not None else self._code_execution_config
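Note on this hunk: the `config` override now carries the same `Union[Dict, Literal[False]]` shape as the stored `_code_execution_config`, so passing `False` disables code execution while a dict supplies execution settings. Illustrative values only; the `work_dir`/`use_docker` keys are assumed to be the usual autogen code-execution options:

```python
from typing import Dict, Literal, Union

config_enabled: Union[Dict, Literal[False]] = {"work_dir": "coding", "use_docker": False}
config_disabled: Union[Dict, Literal[False]] = False  # turns code execution off entirely
```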