Add default value to the logprobs parameter #1044

Merged: 11 commits, Dec 25, 2023
.gitignore (1 addition, 1 deletion)
@@ -8,7 +8,7 @@ node_modules/
 *.log

 # Python virtualenv
-.venv
+.venv*

 # Byte-compiled / optimized / DLL files
 __pycache__/
autogen/oai/client.py (16 additions, 3 deletions)
@@ -6,6 +6,7 @@
 import logging
 import inspect
 from flaml.automl.logger import logger_formatter
+from pydantic import ValidationError

 from autogen.oai.openai_utils import get_key, oai_price1k
 from autogen.token_count_utils import count_token
@@ -329,15 +330,27 @@ def _completions_create(self, client, params):
                 ),
             )
             for i in range(len(response_contents)):
-                response.choices.append(
-                    Choice(
+                try:
+                    # OpenAI versions 1.5.0 and above
+                    choice = Choice(
                         index=i,
                         finish_reason=finish_reasons[i],
                         message=ChatCompletionMessage(
                             role="assistant", content=response_contents[i], function_call=None
                         ),
+                        logprobs=None,
                     )
-                )
+                except ValidationError:
+                    # OpenAI versions below 1.5.0
+                    choice = Choice(
+                        index=i,
+                        finish_reason=finish_reasons[i],
+                        message=ChatCompletionMessage(
+                            role="assistant", content=response_contents[i], function_call=None
+                        ),
+                    )
+
+                response.choices.append(choice)
         else:
             # If streaming is not enabled or using functions, send a regular chat completion request
             # Functions are not supported, so ensure streaming is disabled
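Pulled out of the diff, the pattern is: construct the Choice with an explicit logprobs=None first (needed on openai-python releases where Choice gained a required logprobs field, around 1.5.0), and fall back to the older constructor if validation fails. Below is a minimal standalone sketch of that pattern, not the project's actual code path; it assumes openai>=1.x is installed and uses a placeholder string in place of the reassembled streaming content.

from openai.types.chat import ChatCompletionMessage
from openai.types.chat.chat_completion import Choice
from pydantic import ValidationError

content = "reassembled streaming text"  # placeholder for response_contents[i]

try:
    # Newer openai releases: Choice expects a logprobs field, so pass None explicitly.
    choice = Choice(
        index=0,
        finish_reason="stop",
        message=ChatCompletionMessage(role="assistant", content=content, function_call=None),
        logprobs=None,
    )
except ValidationError:
    # Older openai releases: logprobs is not part of the model, so omit it.
    choice = Choice(
        index=0,
        finish_reason="stop",
        message=ChatCompletionMessage(role="assistant", content=content, function_call=None),
    )

print(choice.message.content)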
test/oai/test_client.py (2 additions, 2 deletions)
@@ -21,7 +21,7 @@ def test_aoai_chat_completion():
     config_list = config_list_from_json(
         env_or_file=OAI_CONFIG_LIST,
         file_location=KEY_LOC,
-        filter_dict={"api_type": ["azure"], "model": ["gpt-3.5-turbo"]},
+        filter_dict={"api_type": ["azure"], "model": ["gpt-3.5-turbo", "gpt-35-turbo"]},
     )
     client = OpenAIWrapper(config_list=config_list)
     # for config in config_list:
@@ -38,7 +38,7 @@ def test_oai_tool_calling_extraction():
     config_list = config_list_from_json(
         env_or_file=OAI_CONFIG_LIST,
         file_location=KEY_LOC,
-        filter_dict={"api_type": ["azure"], "model": ["gpt-3.5-turbo"]},
+        filter_dict={"api_type": ["azure"], "model": ["gpt-3.5-turbo", "gpt-35-turbo"]},
     )
     client = OpenAIWrapper(config_list=config_list)
     response = client.create(
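The test change widens the model filter so the suite matches Azure deployments registered under the dot-free name "gpt-35-turbo" as well as "gpt-3.5-turbo". As a rough illustration of how filter_dict behaves, here is a sketch using a throwaway config file; the entries, keys, and endpoint are invented for the example.

import json
import tempfile

from autogen import config_list_from_json  # top-level re-export from autogen.oai

# Hypothetical config entries; a real OAI_CONFIG_LIST holds actual keys and endpoints.
configs = [
    {"model": "gpt-4", "api_key": "sk-...", "api_type": "openai"},
    {"model": "gpt-35-turbo", "api_key": "...", "api_type": "azure",
     "base_url": "https://example-resource.openai.azure.com"},
]

with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as f:
    json.dump(configs, f)

# An entry must match every key in filter_dict; listing both spellings keeps the
# Azure-style model name from being filtered out.
filtered = config_list_from_json(
    env_or_file=f.name,
    filter_dict={"api_type": ["azure"], "model": ["gpt-3.5-turbo", "gpt-35-turbo"]},
)
print([c["model"] for c in filtered])  # expected: ['gpt-35-turbo']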