Skip to content

Commit

Permalink
Update to store multiple chats in a file. Bump to 3.1.0
Browse files Browse the repository at this point in the history
  • Loading branch information
schleising committed Aug 13, 2023
1 parent f98fe54 commit 4b5adbc
Show file tree
Hide file tree
Showing 10 changed files with 200 additions and 163 deletions.
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -130,3 +130,5 @@ dmypy.json
.pyre/
open_ai_key.txt
pypi-stats-383917-43984cdcb072.json

storage/
14 changes: 10 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -21,14 +21,17 @@ For the synchronous version, you can use the following code:
from pathlib import Path

from simple_openai import SimpleOpenai

def main():
# Initialise a storage location
storage_location = Path("/path/to/storage")

# Create a system message
system_message = "You are a helpful chatbot. You are very friendly and helpful. You are a good friend to have."

# Create the client
client = SimpleOpenai(api_key, system_message)
client = SimpleOpenai(api_key, system_message, storage_location)

# Create tasks for the chat response and the image response
result = client.get_chat_response("Hello, how are you?", name="Bob")
result = client.get_chat_response("Hello, how are you?", name="Bob", chat_id="Group 1")

# Print the result
if result.success:
Expand Down Expand Up @@ -58,15 +61,18 @@ For the asynchronous version, you can use the following code:
import asyncio
from pathlib import Path

async def main():
# Initialise a storage location
storage_location = Path("/path/to/storage")

# Create a system message
system_message = "You are a helpful chatbot. You are very friendly and helpful. You are a good friend to have."

# Create the client
client = AsyncSimpleOpenai(api_key, system_message)
client = AsyncSimpleOpenai(api_key, system_message, storage_location)

# Create tasks for the chat response and the image response
tasks = [
client.get_chat_response("Hello, how are you?", name="Bob"),
client.get_chat_response("Hello, how are you?", name="Bob", chat_id="Group 1"),
client.get_image_url("A cat"),
]

Expand Down
14 changes: 10 additions & 4 deletions docs/index.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,14 +23,17 @@ For the synchronous version, you can use the following code:
from pathlib import Path

from simple_openai import SimpleOpenai

def main():
# Initialise a storage location
storage_location = Path("/path/to/storage")

# Create a system message
system_message = "You are a helpful chatbot. You are very friendly and helpful. You are a good friend to have."

# Create the client
client = SimpleOpenai(api_key, system_message)
client = SimpleOpenai(api_key, system_message, storage_location)

# Create tasks for the chat response and the image response
result = client.get_chat_response("Hello, how are you?", name="Bob")
result = client.get_chat_response("Hello, how are you?", name="Bob", chat_id="Group 1")

# Print the result
if result.success:
Expand Down Expand Up @@ -63,15 +66,18 @@ For the asynchronous version, you can use the following code:
import asyncio
from pathlib import Path

async def main():
# Initialise a storage location
storage_location = Path("/path/to/storage")

# Create a system message
system_message = "You are a helpful chatbot. You are very friendly and helpful. You are a good friend to have."

# Create the client
client = AsyncSimpleOpenai(api_key, system_message)
client = AsyncSimpleOpenai(api_key, system_message, storage_location)

# Create tasks for the chat response and the image response
tasks = [
client.get_chat_response("Hello, how are you?", name="Bob"),
client.get_chat_response("Hello, how are you?", name="Bob", chat_id="Group 1"),
client.get_image_url("A cat"),
]

Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "simple-openai"
version = "3.0.0"
version = "3.1.0"
description = "Simple OpenAI API wrapper"
readme = "README.md"
authors = [{ name = "Stephen Schleising", email = "[email protected]" }]
Expand Down
75 changes: 16 additions & 59 deletions src/simple_openai/async_simple_openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
It is intended for use with asyncio applications. If you are not using asyncio, you should use the [Simple OpenAI API wrapper](/simple_openai/simple_openai/) instead.
"""

from pathlib import Path
import aiohttp

from . import constants
Expand All @@ -19,25 +20,31 @@ class AsyncSimpleOpenai:
To use this class, you need to have an OpenAI API key. You can get one from [Openai](https://platform.openai.com).
An optional storage path can be provided. If a storage path is provided, the chat messages will be stored in the directory specified by the storage path. If no storage path is provided, the chat messages will not be stored.
Args:
api_key (str): Your OpenAI API key
system_message (str): The system message to add to the start of the chat
storage_path (Path, optional): The path to the storage directory. Defaults to None.
!!!Example
```python
from simple_openai import AsyncSimpleOpenai
import asyncio
from pathlib import Path
async def main():
# Get the storage path
storage_path = Path("/path/to/storage")
# Create a system message
system_message = "You are a helpful chatbot. You are very friendly and helpful. You are a good friend to have."
# Create the client
client = AsyncSimpleOpenai(api_key, system_message)
client = AsyncSimpleOpenai(api_key, system_message, storage_path)
# Create tasks for the chat response and the image response
tasks = [
client.get_chat_response("Hello, how are you?", name="Bob"),
client.get_chat_response("Hello, how are you?", name="Bob", chat_id="Group 1"),
client.get_image_url("A cat"),
]
Expand All @@ -59,56 +66,31 @@ async def main():
asyncio.run(main())
```
"""
def __init__(self, api_key: str, system_message: str, storage_path: Path | None = None) -> None:
    """Initialise the client.

    Args:
        api_key (str): Your OpenAI API key
        system_message (str): The system message to add to the start of the chat
        storage_path (Path, optional): Directory used to persist chat histories. Defaults to None (no persistence).
    """
    # Headers sent with every request, including the bearer token
    self._headers = {
        'Content-Type': 'application/json',
        'Authorization': f'Bearer {api_key}'
    }

    # Create the chat manager, persisting chats if a storage path was given
    self._chat = chat_manager.ChatManager(system_message, storage_path=storage_path)

async def get_chat_response(self, prompt: str, name: str) -> SimpleOpenaiResponse:
async def get_chat_response(self, prompt: str, name: str, chat_id: str = constants.DEFAULT_CHAT_ID) -> SimpleOpenaiResponse:
"""Get a chat response from OpenAI
An optional chat ID can be provided. If a chat ID is provided, the chat will be continued from the chat with the specified ID. If no chat ID is provided, all messages will be mixed into a single list.
Args:
prompt (str): The prompt to use for the chat response
name (str): The name of the user
chat_id (str, optional): The ID of the chat to continue. Defaults to DEFAULT_CHAT_ID.
Returns:
SimpleOpenaiResponse: The chat response, the value of `success` should be checked before using the value of `message`
!!!Example
```python
from simple_openai import AsyncSimpleOpenai
import asyncio
async def main():
# Create a system message
system_message = "You are a helpful chatbot. You are very friendly and helpful. You are a good friend to have."
# Create the client
client = AsyncSimpleOpenai(api_key, system_message)
# Get the chat response
response = await client.get_chat_response("Hello, how are you?", name="Bob")
# Check if the request was successful
if response.success:
# Print the chat response
print(f'Chat response: {response.message}')
else:
# Print the error message
print(f'Error: {response.message}')
if __name__ == "__main__":
asyncio.run(main())
```
"""
# Add the message to the chat
messages = self._chat.add_message(open_ai_models.ChatMessage(role='user', content=prompt, name=name)).messages
messages = self._chat.add_message(open_ai_models.ChatMessage(role='user', content=prompt, name=name), chat_id=chat_id).messages

# Create the request body
request_body = open_ai_models.ChatRequest(messages=messages)
Expand Down Expand Up @@ -145,31 +127,6 @@ async def get_image_url(self, prompt: str) -> SimpleOpenaiResponse:
Returns:
SimpleOpenaiResponse: The image response, the value of `success` should be checked before using the value of `message`
!!!Example
```python
from simple_openai import AsyncSimpleOpenai
import asyncio
async def main():
# Create the client
client = AsyncSimpleOpenai(api_key)
# Get the image response
response = await client.get_image_url("A cat")
# Check if the request was successful
if response.success:
# Print the image URL
print(f'Image Generated Successfully, it can be found at {response.message}')
else:
# Print the error message
print(f'Image Generation Failed, Error: {response.message}')
if __name__ == "__main__":
asyncio.run(main())
```
"""

# Create the request body
Expand Down
61 changes: 49 additions & 12 deletions src/simple_openai/chat_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,43 +2,80 @@
This module contains the chat manager for the simple openai app.
The chat manager is used to manage the chat messages and create the chat, it limits the number of messages in the chat to 11 by default and adds the system message to the start of the list.
The chat manager is used to manage the chat messages and create the chat, it limits the number of messages in the chat to 21 by default and adds the system message to the start of the list.
"""


from collections import deque
from pathlib import Path
import pickle

from .models import open_ai_models
from .constants import MAX_CHAT_HISTORY, CHAT_HISTORY_FILE, DEFAULT_CHAT_ID

class ChatManager:
    """Manage per-chat message histories and build complete chats.

    Keeps at most ``max_messages`` messages per chat (the deque silently drops
    the oldest message when full) and prepends the system message when building
    a chat.  It can optionally handle messages from multiple chats separately
    and store them in a file.

    On initialisation, the chat manager tries to load the chat history from the
    storage file.  If the file does not exist (or cannot be read back), it
    starts a new, empty chat history.

    Args:
        system_message (str): The system message to add to the start of the chat
        max_messages (int, optional): The maximum number of messages kept per chat. Defaults to MAX_CHAT_HISTORY.
        storage_path (Path, optional): The path to the storage directory. Defaults to None (no persistence).
    """
    def __init__(self, system_message: str, max_messages: int = MAX_CHAT_HISTORY, storage_path: Path | None = None) -> None:
        self._system_message = system_message
        self._max_messages = max_messages
        self._storage_path = storage_path

        # Start with an empty history; replaced below if a stored history loads
        self._messages: dict[str, deque[open_ai_models.ChatMessage]] = {}

        if self._storage_path is not None:
            # Make sure the storage directory exists before reading or writing
            self._storage_path.mkdir(parents=True, exist_ok=True)

            try:
                # NOTE: pickle is only safe for trusted, locally written files
                with open(self._storage_path / CHAT_HISTORY_FILE, 'rb') as f:
                    self._messages = pickle.load(f)
            except (FileNotFoundError, EOFError, pickle.UnpicklingError):
                # No history yet, or the file is empty/corrupted - start fresh
                # (previously only FileNotFoundError was handled, so a truncated
                # or corrupted history file crashed initialisation)
                self._messages = {}

    def add_message(self, message: open_ai_models.ChatMessage, chat_id: str = DEFAULT_CHAT_ID) -> open_ai_models.Chat:
        """Add a message to a chat and return the full chat.

        Args:
            message (open_ai_models.ChatMessage): The message to add to the chat
            chat_id (str, optional): The ID of the chat to add the message to. Defaults to DEFAULT_CHAT_ID.

        Returns:
            open_ai_models.Chat: The chat, with the system message first
        """
        # Create a bounded history for this chat on first use
        if chat_id not in self._messages:
            self._messages[chat_id] = deque(maxlen=self._max_messages)

        # Append, silently discarding the oldest message when the deque is full
        self._messages[chat_id].append(message)

        # Persist the complete history (all chats) after every message
        if self._storage_path is not None:
            with open(self._storage_path / CHAT_HISTORY_FILE, 'wb') as f:
                pickle.dump(self._messages, f)

        # Build the chat with the system message prepended
        chat = open_ai_models.Chat(messages=[
            open_ai_models.ChatMessage(role='system', content=self._system_message, name='System')
        ] + list(self._messages[chat_id]))

        # Return the chat
        return chat
7 changes: 7 additions & 0 deletions src/simple_openai/constants.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,13 @@
from pathlib import Path


# Base OpenAI API endpoint and the per-feature URL paths
BASE_URL = 'https://api.openai.com'
CHAT_URL = '/v1/chat/completions'
IMAGE_URL = '/v1/images/generations'

# Fully qualified endpoint URLs used by the clients
FULL_CHAT_URL = BASE_URL + CHAT_URL
FULL_IMAGE_URL = BASE_URL + IMAGE_URL

# Maximum number of messages kept per chat history
MAX_CHAT_HISTORY = 21
# File name (within the storage directory) used to persist chat histories
CHAT_HISTORY_FILE = Path('chat_history.pickle')
# Chat ID used when the caller does not supply one
DEFAULT_CHAT_ID = 'default'
Loading

0 comments on commit 4b5adbc

Please sign in to comment.