diff --git a/python/packages/autogen-core/src/autogen_core/model_context/__init__.py b/python/packages/autogen-core/src/autogen_core/model_context/__init__.py index 7d32b76f48a9..47ce9421e44a 100644 --- a/python/packages/autogen-core/src/autogen_core/model_context/__init__.py +++ b/python/packages/autogen-core/src/autogen_core/model_context/__init__.py @@ -1,9 +1,9 @@ -from ._buffered_chat_completion_context import ( - BufferedChatCompletionContext, - UnboundedBufferedChatCompletionContext, -) +from ._buffered_chat_completion_context import BufferedChatCompletionContext from ._chat_completion_context import ChatCompletionContext from ._head_and_tail_chat_completion_context import HeadAndTailChatCompletionContext +from ._unbounded_buffered_chat_completion_context import ( + UnboundedBufferedChatCompletionContext, +) __all__ = [ "ChatCompletionContext", diff --git a/python/packages/autogen-core/src/autogen_core/model_context/_buffered_chat_completion_context.py b/python/packages/autogen-core/src/autogen_core/model_context/_buffered_chat_completion_context.py index 282dbd58058c..15b634fad6ac 100644 --- a/python/packages/autogen-core/src/autogen_core/model_context/_buffered_chat_completion_context.py +++ b/python/packages/autogen-core/src/autogen_core/model_context/_buffered_chat_completion_context.py @@ -43,36 +43,3 @@ def save_state(self) -> Mapping[str, Any]: def load_state(self, state: Mapping[str, Any]) -> None: self._messages = state["messages"] self._buffer_size = state["buffer_size"] - - -class UnboundedBufferedChatCompletionContext(ChatCompletionContext): - """A buffered chat completion context that keeps a view of the last n messages, - where n is the buffer size. The buffer size is set at initialization. - - Args: - buffer_size (int): The size of the buffer. 
- - """ - - def __init__(self, initial_messages: List[LLMMessage] | None = None) -> None: - self._messages: List[LLMMessage] = initial_messages or [] - - async def add_message(self, message: LLMMessage) -> None: - """Add a message to the memory.""" - self._messages.append(message) - - async def get_messages(self) -> List[LLMMessage]: - """Get at most `buffer_size` recent messages.""" - return self._messages - - async def clear(self) -> None: - """Clear the message memory.""" - self._messages = [] - - def save_state(self) -> Mapping[str, Any]: - return { - "messages": [message for message in self._messages], - } - - def load_state(self, state: Mapping[str, Any]) -> None: - self._messages = state["messages"] diff --git a/python/packages/autogen-core/src/autogen_core/model_context/_unbounded_buffered_chat_completion_context.py b/python/packages/autogen-core/src/autogen_core/model_context/_unbounded_buffered_chat_completion_context.py new file mode 100644 index 000000000000..651ef3095357 --- /dev/null +++ b/python/packages/autogen-core/src/autogen_core/model_context/_unbounded_buffered_chat_completion_context.py @@ -0,0 +1,31 @@ +from typing import Any, List, Mapping + +from ..models import LLMMessage +from ._chat_completion_context import ChatCompletionContext + + +class UnboundedBufferedChatCompletionContext(ChatCompletionContext): +    """An unbounded buffered chat completion context that keeps a view of all the messages.""" + +    def __init__(self, initial_messages: List[LLMMessage] | None = None) -> None: +        self._messages: List[LLMMessage] = initial_messages or [] + +    async def add_message(self, message: LLMMessage) -> None: +        """Add a message to the memory.""" +        self._messages.append(message) + +    async def get_messages(self) -> List[LLMMessage]: +        """Get all the messages in the memory.""" +        return self._messages + +    async def clear(self) -> None: +        """Clear the message memory.""" +        self._messages = [] + +    def save_state(self) -> Mapping[str, Any]: +        return 
{ + "messages": [message for message in self._messages], + } + + def load_state(self, state: Mapping[str, Any]) -> None: + self._messages = state["messages"]