
Commit 85ad929

Authored by bboynton97, areibman, sonichi, ekzhu, and HowieG
AgentOps Runtime Logging Implementation (#2682)
* add agentops req
* track conversable agents with agentops
* track tool usage
* track message sending
* remove record from parent
* remove record
* simple example
* notebook example
* remove spacing change
* optional dependency
* documentation
* remove extra import
* optional import
* record if agentops
* if agentops
* wrap function auto name
* install agentops before notebook test
* documentation fixes
* notebook metadata
* notebook metadata
* pre-commit hook changes
* doc link fixes
* git lfs
* autogen tag
* bump agentops version
* log tool events
* notebook fixes
* docs
* formatting
* Updated ecosystem manual
* Update notebook for clarity
* cleaned up notebook
* updated precommit recommendations
* Fixed links to screenshots and examples
* removed unused files
* changed notebook hyperlink
* update docusaurus link path
* reverted setup.py
* change setup again
* undo changes
* revert conversable agent
* removed file not in branch
* Updated notebook to look nicer
* change letter
* revert setup
* revert setup again
* change ref link
* change reflink
* remove optional dependency
* removed duplicated section
* Addressed clarity commetns from howard
* minor updates to wording
* formatting and pr fixes
* added info markdown cell
* better docs
* notebook
* observability docs
* pre-commit fixes
* example images in notebook
* example images in docs
* example images in docs
* delete agentops ong
* doc updates
* docs updates
* docs updates
* use agent as extra_kwarg
* add logging tests
* pass function properly
* create table
* dummy function name
* log chat completion source name
* safe serialize
* test fixes
* formatting
* type checks

---------

Co-authored-by: reibs <[email protected]>
Co-authored-by: Chi Wang <[email protected]>
Co-authored-by: Eric Zhu <[email protected]>
Co-authored-by: Howard Gil <[email protected]>
Co-authored-by: Alex Reibman <[email protected]>
1 parent 75f0808 commit 85ad929

16 files changed: +1,145 −191 lines

autogen/agentchat/conversable_agent.py (+4 −5)

@@ -31,7 +31,7 @@
 from ..function_utils import get_function_schema, load_basemodels_if_needed, serialize_to_str
 from ..io.base import IOStream
 from ..oai.client import ModelClient, OpenAIWrapper
-from ..runtime_logging import log_event, log_new_agent, logging_enabled
+from ..runtime_logging import log_event, log_function_use, log_new_agent, logging_enabled
 from .agent import Agent, LLMAgent
 from .chat import ChatResult, a_initiate_chats, initiate_chats
 from .utils import consolidate_chat_info, gather_usage_summary
@@ -1357,9 +1357,7 @@ def _generate_oai_reply_from_client(self, llm_client, messages, cache) -> Union[

         # TODO: #1143 handle token limit exceeded error
         response = llm_client.create(
-            context=messages[-1].pop("context", None),
-            messages=all_messages,
-            cache=cache,
+            context=messages[-1].pop("context", None), messages=all_messages, cache=cache, agent=self
         )
         extracted_response = llm_client.extract_text_or_completion_object(response)[0]

@@ -2528,13 +2526,14 @@ def _wrap_function(self, func: F) -> F:
         @functools.wraps(func)
         def _wrapped_func(*args, **kwargs):
             retval = func(*args, **kwargs)
-
+            log_function_use(self, func, kwargs, retval)
             return serialize_to_str(retval)

         @load_basemodels_if_needed
         @functools.wraps(func)
         async def _a_wrapped_func(*args, **kwargs):
             retval = await func(*args, **kwargs)
+            log_function_use(self, func, kwargs, retval)
             return serialize_to_str(retval)

         wrapped_func = _a_wrapped_func if inspect.iscoroutinefunction(func) else _wrapped_func
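Note: the `_wrap_function` change above means every function registered on a `ConversableAgent` is recorded through `log_function_use` when it runs. A minimal sketch of how this surfaces through the public API, assuming a SQLite logging session; the "logs.db" database name and the `add` tool are illustrative only:

```python
# Sketch: with runtime logging started, a function registered for execution is
# wrapped by _wrap_function, so every call is forwarded to log_function_use.
import autogen
from autogen import ConversableAgent

autogen.runtime_logging.start(logger_type="sqlite", config={"dbname": "logs.db"})

executor = ConversableAgent("executor", llm_config=False, human_input_mode="NEVER")


@executor.register_for_execution(name="add")
def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b


# Invoking the registered tool (normally triggered by an incoming tool call)
# runs _wrapped_func, which logs the kwargs and return value before
# serializing the result to a string.
print(executor.function_map["add"](a=1, b=2))  # "3"

autogen.runtime_logging.stop()
```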

autogen/logger/base_logger.py (+18 −3)

@@ -3,14 +3,15 @@
 import sqlite3
 import uuid
 from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, Any, Dict, List, Union
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, TypeVar, Union

 from openai import AzureOpenAI, OpenAI
 from openai.types.chat import ChatCompletion

 if TYPE_CHECKING:
     from autogen import Agent, ConversableAgent, OpenAIWrapper

+F = TypeVar("F", bound=Callable[..., Any])
 ConfigItem = Dict[str, Union[str, List[str]]]
 LLMConfig = Dict[str, Union[None, float, int, ConfigItem, List[ConfigItem]]]

@@ -32,6 +33,7 @@ def log_chat_completion(
         invocation_id: uuid.UUID,
         client_id: int,
         wrapper_id: int,
+        source: Union[str, Agent],
         request: Dict[str, Union[float, str, List[Dict[str, str]]]],
         response: Union[str, ChatCompletion],
         is_cached: int,
@@ -49,9 +51,10 @@ def log_chat_completion(
             invocation_id (uuid): A unique identifier for the invocation to the OpenAIWrapper.create method call
             client_id (int): A unique identifier for the underlying OpenAI client instance
             wrapper_id (int): A unique identifier for the OpenAIWrapper instance
-            request (dict): A dictionary representing the the request or call to the OpenAI client endpoint
+            source (str or Agent): The source/creator of the event as a string name or an Agent instance
+            request (dict): A dictionary representing the request or call to the OpenAI client endpoint
             response (str or ChatCompletion): The response from OpenAI
-            is_chached (int): 1 if the response was a cache hit, 0 otherwise
+            is_cached (int): 1 if the response was a cache hit, 0 otherwise
             cost(float): The cost for OpenAI response
             start_time (str): A string representing the moment the request was initiated
         """
@@ -104,6 +107,18 @@ def log_new_client(
         """
         ...

+    @abstractmethod
+    def log_function_use(self, source: Union[str, Agent], function: F, args: Dict[str, Any], returns: Any) -> None:
+        """
+        Log the use of a registered function (could be a tool)
+
+        Args:
+            source (str or Agent): The source/creator of the event as a string name or an Agent instance
+            function (F): The function information
+            args (dict): The function args to log
+            returns (any): The return
+        """
+
     @abstractmethod
     def stop(self) -> None:
         """

autogen/logger/file_logger.py (+43 −1)

@@ -5,7 +5,7 @@
 import os
 import threading
 import uuid
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, TypeVar, Union

 from openai import AzureOpenAI, OpenAI
 from openai.types.chat import ChatCompletion
@@ -21,9 +21,21 @@

 logger = logging.getLogger(__name__)

+F = TypeVar("F", bound=Callable[..., Any])
+
 __all__ = ("FileLogger",)


+def safe_serialize(obj: Any) -> str:
+    def default(o: Any) -> str:
+        if hasattr(o, "to_json"):
+            return str(o.to_json())
+        else:
+            return f"<<non-serializable: {type(o).__qualname__}>>"
+
+    return json.dumps(obj, default=default)
+
+
 class FileLogger(BaseLogger):
     def __init__(self, config: Dict[str, Any]):
         self.config = config
@@ -59,6 +71,7 @@ def log_chat_completion(
         invocation_id: uuid.UUID,
         client_id: int,
         wrapper_id: int,
+        source: Union[str, Agent],
         request: Dict[str, Union[float, str, List[Dict[str, str]]]],
         response: Union[str, ChatCompletion],
         is_cached: int,
@@ -69,6 +82,11 @@ def log_chat_completion(
         Log a chat completion.
         """
         thread_id = threading.get_ident()
+        source_name = None
+        if isinstance(source, str):
+            source_name = source
+        else:
+            source_name = source.name
         try:
             log_data = json.dumps(
                 {
@@ -82,6 +100,7 @@ def log_chat_completion(
                     "start_time": start_time,
                     "end_time": get_current_ts(),
                     "thread_id": thread_id,
+                    "source_name": source_name,
                 }
             )

@@ -204,6 +223,29 @@ def log_new_client(
         except Exception as e:
             self.logger.error(f"[file_logger] Failed to log event {e}")

+    def log_function_use(self, source: Union[str, Agent], function: F, args: Dict[str, Any], returns: Any) -> None:
+        """
+        Log a registered function(can be a tool) use from an agent or a string source.
+        """
+        thread_id = threading.get_ident()
+
+        try:
+            log_data = json.dumps(
+                {
+                    "source_id": id(source),
+                    "source_name": str(source.name) if hasattr(source, "name") else source,
+                    "agent_module": source.__module__,
+                    "agent_class": source.__class__.__name__,
+                    "timestamp": get_current_ts(),
+                    "thread_id": thread_id,
+                    "input_args": safe_serialize(args),
+                    "returns": safe_serialize(returns),
+                }
+            )
+            self.logger.info(log_data)
+        except Exception as e:
+            self.logger.error(f"[file_logger] Failed to log event {e}")
+
     def get_connection(self) -> None:
         """Method is intentionally left blank because there is no specific connection needed for the FileLogger."""
         pass
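Note: the `safe_serialize` helper added here keeps logging from crashing on arguments or return values that `json.dumps` cannot handle. A standalone illustration of the fallback behaviour; the `Handle` class is just a stand-in for any non-serializable object:

```python
# Quick illustration of safe_serialize's fallback behaviour.
import json
from typing import Any


def safe_serialize(obj: Any) -> str:
    def default(o: Any) -> str:
        if hasattr(o, "to_json"):
            return str(o.to_json())
        return f"<<non-serializable: {type(o).__qualname__}>>"

    return json.dumps(obj, default=default)


class Handle:
    pass


print(safe_serialize({"x": 1, "handle": Handle()}))
# -> {"x": 1, "handle": "<<non-serializable: Handle>>"}
```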

autogen/logger/sqlite_logger.py (+54 −3)

@@ -6,7 +6,7 @@
 import sqlite3
 import threading
 import uuid
-from typing import TYPE_CHECKING, Any, Dict, List, Tuple, Union
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Tuple, TypeVar, Union

 from openai import AzureOpenAI, OpenAI
 from openai.types.chat import ChatCompletion
@@ -25,6 +25,18 @@

 __all__ = ("SqliteLogger",)

+F = TypeVar("F", bound=Callable[..., Any])
+
+
+def safe_serialize(obj: Any) -> str:
+    def default(o: Any) -> str:
+        if hasattr(o, "to_json"):
+            return str(o.to_json())
+        else:
+            return f"<<non-serializable: {type(o).__qualname__}>>"
+
+    return json.dumps(obj, default=default)
+

 class SqliteLogger(BaseLogger):
     schema_version = 1
@@ -49,6 +61,7 @@ def start(self) -> str:
                 client_id INTEGER,
                 wrapper_id INTEGER,
                 session_id TEXT,
+                source_name TEXT,
                 request TEXT,
                 response TEXT,
                 is_cached INEGER,
@@ -118,6 +131,18 @@ class TEXT, -- type or class name of cli
         """
         self._run_query(query=query)

+        query = """
+            CREATE TABLE IF NOT EXISTS function_calls (
+                source_id INTEGER,
+                source_name TEXT,
+                function_name TEXT,
+                args TEXT DEFAULT NULL,
+                returns TEXT DEFAULT NULL,
+                timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
+            );
+        """
+        self._run_query(query=query)
+
         current_verion = self._get_current_db_version()
         if current_verion is None:
             self._run_query(
@@ -192,6 +217,7 @@ def log_chat_completion(
         invocation_id: uuid.UUID,
         client_id: int,
         wrapper_id: int,
+        source: Union[str, Agent],
         request: Dict[str, Union[float, str, List[Dict[str, str]]]],
         response: Union[str, ChatCompletion],
         is_cached: int,
@@ -208,10 +234,16 @@ def log_chat_completion(
         else:
             response_messages = json.dumps(to_dict(response), indent=4)

+        source_name = None
+        if isinstance(source, str):
+            source_name = source
+        else:
+            source_name = source.name
+
         query = """
             INSERT INTO chat_completions (
-                invocation_id, client_id, wrapper_id, session_id, request, response, is_cached, cost, start_time, end_time
-            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                invocation_id, client_id, wrapper_id, session_id, request, response, is_cached, cost, start_time, end_time, source_name
+            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
         """
         args = (
             invocation_id,
@@ -224,6 +256,7 @@ def log_chat_completion(
             cost,
             start_time,
             end_time,
+            source_name,
         )

         self._run_query(query=query, args=args)
@@ -335,6 +368,24 @@ def log_new_wrapper(self, wrapper: OpenAIWrapper, init_args: Dict[str, Union[LLM
         )
         self._run_query(query=query, args=args)

+    def log_function_use(self, source: Union[str, Agent], function: F, args: Dict[str, Any], returns: Any) -> None:
+
+        if self.con is None:
+            return
+
+        query = """
+            INSERT INTO function_calls (source_id, source_name, function_name, args, returns, timestamp) VALUES (?, ?, ?, ?, ?, ?)
+        """
+        query_args: Tuple[Any, ...] = (
+            id(source),
+            source.name if hasattr(source, "name") else source,
+            function.__name__,
+            safe_serialize(args),
+            safe_serialize(returns),
+            get_current_ts(),
+        )
+        self._run_query(query=query, args=query_args)
+
     def log_new_client(
         self, client: Union[AzureOpenAI, OpenAI, GeminiClient], wrapper: OpenAIWrapper, init_args: Dict[str, Any]
     ) -> None:
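Note: once a SQLite logging session has run, the new `function_calls` table can be inspected with plain `sqlite3`. A hedged sketch; the column names come from the CREATE TABLE statement above, and "logs.db" is the database name passed to `runtime_logging.start`, which may differ in your setup:

```python
# Read back the function_calls table written by SqliteLogger.
import sqlite3

con = sqlite3.connect("logs.db")
rows = con.execute(
    "SELECT source_name, function_name, args, returns, timestamp FROM function_calls"
)
for source_name, function_name, args, returns, timestamp in rows:
    print(f"[{timestamp}] {source_name} called {function_name}({args}) -> {returns}")
con.close()
```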

autogen/oai/client.py (+6 −0)

@@ -319,6 +319,7 @@ class OpenAIWrapper:
     """A wrapper class for openai client."""

     extra_kwargs = {
+        "agent",
         "cache",
         "cache_seed",
         "filter_func",
@@ -542,6 +543,7 @@ def create(self, **config: Any) -> ModelClient.ModelClientResponseProtocol:
                 Note that the cache argument overrides the legacy cache_seed argument: if this argument is provided,
                 then the cache_seed argument is ignored. If this argument is not provided or None,
                 then the cache_seed argument is used.
+            - agent (AbstractAgent | None): The object responsible for creating a completion if an agent.
            - (Legacy) cache_seed (int | None) for using the DiskCache. Default to 41.
                An integer cache_seed is useful when implementing "controlled randomness" for the completion.
                None for no caching.
@@ -589,6 +591,7 @@ def yes_or_no_filter(context, response):
         cache = extra_kwargs.get("cache")
         filter_func = extra_kwargs.get("filter_func")
         context = extra_kwargs.get("context")
+        agent = extra_kwargs.get("agent")

         total_usage = None
         actual_usage = None
@@ -626,6 +629,7 @@ def yes_or_no_filter(context, response):
                     invocation_id=invocation_id,
                     client_id=id(client),
                     wrapper_id=id(self),
+                    agent=agent,
                     request=params,
                     response=response,
                     is_cached=1,
@@ -658,6 +662,7 @@ def yes_or_no_filter(context, response):
                     invocation_id=invocation_id,
                     client_id=id(client),
                     wrapper_id=id(self),
+                    agent=agent,
                     request=params,
                     response=f"error_code:{error_code}, config {i} failed",
                     is_cached=0,
@@ -688,6 +693,7 @@ def yes_or_no_filter(context, response):
                     invocation_id=invocation_id,
                     client_id=id(client),
                     wrapper_id=id(self),
+                    agent=agent,
                     request=params,
                     response=response,
                     is_cached=0,
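Note: since "agent" is now an accepted extra_kwarg, a completion can be attributed to an agent even when `OpenAIWrapper` is called directly; the wrapper separates "agent" from the OpenAI request and forwards it to `log_chat_completion`, where it is recorded as `source_name`. A minimal sketch, assuming a valid model configuration (the `config_list` values below are placeholders):

```python
# Attribute a direct OpenAIWrapper.create call to an agent in the runtime logs.
import autogen
from autogen import ConversableAgent, OpenAIWrapper

config_list = [{"model": "gpt-4", "api_key": "YOUR_API_KEY"}]  # placeholder credentials

autogen.runtime_logging.start(logger_type="sqlite", config={"dbname": "logs.db"})

assistant = ConversableAgent("assistant", llm_config=False)
wrapper = OpenAIWrapper(config_list=config_list)

response = wrapper.create(
    messages=[{"role": "user", "content": "2 + 2 = ?"}],
    agent=assistant,  # stripped from the request, used only for logging
)
print(wrapper.extract_text_or_completion_object(response)[0])

autogen.runtime_logging.stop()
```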

autogen/runtime_logging.py (+13 −2)

@@ -3,7 +3,7 @@
 import logging
 import sqlite3
 import uuid
-from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Union
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Literal, Optional, TypeVar, Union

 from openai import AzureOpenAI, OpenAI
 from openai.types.chat import ChatCompletion
@@ -20,6 +20,8 @@
 autogen_logger = None
 is_logging = False

+F = TypeVar("F", bound=Callable[..., Any])
+

 def start(
     logger: Optional[BaseLogger] = None,
@@ -56,6 +58,7 @@ def log_chat_completion(
     invocation_id: uuid.UUID,
     client_id: int,
     wrapper_id: int,
+    agent: Union[str, Agent],
     request: Dict[str, Union[float, str, List[Dict[str, str]]]],
     response: Union[str, ChatCompletion],
     is_cached: int,
@@ -67,7 +70,7 @@ def log_chat_completion(
         return

    autogen_logger.log_chat_completion(
-        invocation_id, client_id, wrapper_id, request, response, is_cached, cost, start_time
+        invocation_id, client_id, wrapper_id, agent, request, response, is_cached, cost, start_time
    )


@@ -87,6 +90,14 @@ def log_event(source: Union[str, Agent], name: str, **kwargs: Dict[str, Any]) ->
     autogen_logger.log_event(source, name, **kwargs)


+def log_function_use(agent: Union[str, Agent], function: F, args: Dict[str, Any], returns: any):
+    if autogen_logger is None:
+        logger.error("[runtime logging] log_function_use: autogen logger is None")
+        return
+
+    autogen_logger.log_function_use(agent, function, args, returns)
+
+
 def log_new_wrapper(wrapper: OpenAIWrapper, init_args: Dict[str, Union[LLMConfig, List[LLMConfig]]]) -> None:
     if autogen_logger is None:
         logger.error("[runtime logging] log_new_wrapper: autogen logger is None")
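Note: putting the module-level pieces together: start a session, let the logger record events, then stop. `log_function_use` is normally invoked from `ConversableAgent._wrap_function`, but it can be exercised directly, as in the hedged sketch below; the FileLogger config key and "runtime.log" filename are assumptions about the file-based logger, and the `lookup` tool is illustrative:

```python
# End-to-end sketch of the module-level runtime logging API touched here.
import autogen
from autogen import ConversableAgent
from autogen.runtime_logging import log_function_use, logging_enabled


def lookup(term: str) -> str:
    # Illustrative tool; stands in for any registered function.
    return f"results for {term}"


session_id = autogen.runtime_logging.start(logger_type="file", config={"filename": "runtime.log"})
agent = ConversableAgent("assistant", llm_config=False)

if logging_enabled():
    # Record one tool call against the agent, then shut the session down.
    log_function_use(agent, lookup, {"term": "agentops"}, lookup(term="agentops"))

autogen.runtime_logging.stop()
print(f"logged session {session_id} to runtime.log")
```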
