release: 0.1.0
Now it saves both prompts and completions
superheavytail committed Jun 2, 2024
1 parent 5998095 commit 29e76ae
Showing 3 changed files with 3 additions and 3 deletions.
2 changes: 1 addition & 1 deletion batched_chatgpt/__init__.py
@@ -1,3 +1,3 @@
 from .chatgpt_utils import call_chatgpt
 
-__version__ = '0.0.5'
+__version__ = '0.1.0'
2 changes: 1 addition & 1 deletion batched_chatgpt/chatgpt_utils.py
@@ -93,7 +93,7 @@ def batched_multiprocess_auto_retry(
                outputs[remain_indices[i * chunk_size + j]] = result
 
        # save the outputs which may be incomplete
-        pickle_bobj(outputs, pkl_path) if pkl_path else None
+        pickle_bobj({'prompts': items, 'completions': outputs}, pkl_path) if pkl_path else None
 
        time.sleep(sleep_between_chunk) if not all(outputs) else ...
    return outputs
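With this change, the checkpoint written to pkl_path holds a dict with 'prompts' and 'completions' keys rather than a bare list of completions. A minimal sketch of reading such a checkpoint, assuming pickle_bobj writes an ordinary pickle file; the path name below is hypothetical:

import pickle

# Load a checkpoint written by batched_multiprocess_auto_retry (path name is hypothetical).
with open("chatgpt_outputs.pkl", "rb") as f:
    ckpt = pickle.load(f)

# As of 0.1.0 the checkpoint is a dict, not a bare list of outputs.
prompts = ckpt["prompts"]          # the input items passed to the API
completions = ckpt["completions"]  # results; entries may be None if a call has not yet succeeded

for prompt, completion in zip(prompts, completions):
    print(prompt, "->", completion)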
2 changes: 1 addition & 1 deletion setup.py
@@ -2,7 +2,7 @@
 
 setup(
     name="batched_chatgpt",
-    version="0.0.5",
+    version="0.1.0",
     description="Easy calling chatgpt with batched instances",
     packages=find_packages(),
     author="superheavytail",
