Skip to content

Commit

Permalink
Release 0.0.5
Browse files Browse the repository at this point in the history
Now it displays warning message if retry count exceeds 5
  • Loading branch information
superheavytail committed Mar 24, 2024
1 parent 2e5de26 commit 2f55e70
Show file tree
Hide file tree
Showing 4 changed files with 15 additions and 8 deletions.
9 changes: 3 additions & 6 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ resp = call_chatgpt(
system_message=['You are a helpful assistant.'] * len(prompts),
model_name=CHATGPT_VERSION_NAME, # default is 'gpt-3.5-turbo'
temperature=TEMPERATURE, # default 0.0
chunk_size=CHATGPT_CHUNK_SIZE,
chunk_size=CONCURRENCY_NUM,
timeout_each=TIMEOUT_EACH,
sleep_between_chunk=SLEEP_BETWEEN_CHUNK,
pkl_path=file_dir, # ex) "result.pkl'
Expand All @@ -55,15 +55,12 @@ resp = call_chatgpt(
- ```system_message```: list of system prompts. It can be a str, or a list of str that has the same length as human_message
- ```model_name```: ChatGPT API name (ex: "gpt-4-1106-preview")
- ```temperature```: Controls randomness of the generated text
- ```chunk_size```: The number of examples which send in one batch
- ```timeout_each```: API call timeout
- ```chunk_size```: The number of examples which are sent simultaneously in one batch
- ```timeout_each```: API call timeout (for each batch)
- ```sleep_between_chunk```: sleep time between batches
- ```pkl_path```: Specifies the path where output will be saved. By default, outputs are not saved.
- ```verbose```: If true, debugging message will be printed.

## Warning
- Currently, no retry count limit is set. So, It should be manually stopped if the API is failed repeatedly. (Will be fixed soon)

## Requirements
- langchain
- langchain-openai
Expand Down
2 changes: 1 addition & 1 deletion batched_chatgpt/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
from .chatgpt_utils import call_chatgpt

__version__ = '0.0.4'
__version__ = '0.0.5'
10 changes: 10 additions & 0 deletions batched_chatgpt/chatgpt_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
from pprint import pprint
from multiprocessing import Queue, Process
from pathlib import Path
from logging import getLogger
from itertools import count
import time
import os

Expand All @@ -12,6 +14,9 @@
from .utils import pickle_bobj, get_saving_filename_safely


logger = getLogger(__name__)


def process_chunk_element(i, queue, item, model_name, temperature):
chat = ChatOpenAI(model=model_name, temperature=temperature)
res = chat.invoke(item)
Expand Down Expand Up @@ -62,6 +67,7 @@ def batched_multiprocess_auto_retry(
pkl_path = get_saving_filename_safely(pkl_path) if pkl_path else None # if pkl_path, result saved.

outputs = [None] * len(items)
c = count()
while not all(outputs):
# printing remained queries if the number of remained queries is small
num_of_remains = outputs.count(None)
Expand All @@ -83,6 +89,10 @@ def batched_multiprocess_auto_retry(
pickle_bobj(outputs, pkl_path) if pkl_path else None

time.sleep(sleep_between_chunk) if not all(outputs) else ...

# display a warning message if the global retry count exceeds 5.
if next(c) >= 5:
        logger.warning("Retry count has exceeded 5. The process may be stuck due to an unresponsive item.")
return outputs


Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

setup(
name="batched_chatgpt",
version="0.0.4",
version="0.0.5",
description="Easy calling chatgpt with batched instances",
packages=find_packages(),
author="superheavytail",
Expand Down

0 comments on commit 2f55e70

Please sign in to comment.