Handle azure_deployment Parameter Issue in GPTAssistantAgent to Maintain Compatibility with OpenAIWrapper (#1721)

* support getting model from both llm config and config list

* address comments

* address comments

---------

Co-authored-by: Chi Wang <[email protected]>
IANTHEREAL and sonichi authored Feb 24, 2024
1 parent fbc2f6e commit fb2b412
Showing 2 changed files with 38 additions and 13 deletions.
30 changes: 20 additions & 10 deletions autogen/agentchat/contrib/gpt_assistant_agent.py
@@ -3,6 +3,7 @@
import json
import time
import logging
+import copy

from autogen import OpenAIWrapper
from autogen.oai.openai_utils import retrieve_assistants_by_name
@@ -52,12 +53,26 @@ def __init__(
                - verbose (bool): If set to True, enables more detailed output from the assistant thread.
                - Other kwargs: Except verbose, others are passed directly to ConversableAgent.
        """

+        self._verbose = kwargs.pop("verbose", False)
+        super().__init__(
+            name=name, system_message=instructions, human_input_mode="NEVER", llm_config=llm_config, **kwargs
+        )
+
+        if llm_config is False:
+            raise ValueError("llm_config=False is not supported for GPTAssistantAgent.")
+
        # Use AutoGen OpenAIWrapper to create a client
-        openai_client_cfg = None
-        model_name = "gpt-4-1106-preview"
-        if llm_config and llm_config.get("config_list") is not None and len(llm_config["config_list"]) > 0:
-            openai_client_cfg = llm_config["config_list"][0].copy()
-            model_name = openai_client_cfg.pop("model", "gpt-4-1106-preview")
+        model_name = "gpt-4-0125-preview"
+        openai_client_cfg = copy.deepcopy(llm_config)
+        # GPTAssistantAgent's azure_deployment param may cause NotFoundError (404) in client.beta.assistants.list()
+        # See: https://github.com/microsoft/autogen/pull/1721
+        if openai_client_cfg.get("config_list") is not None and len(openai_client_cfg["config_list"]) > 0:
+            model_name = openai_client_cfg["config_list"][0].pop("model", "gpt-4-0125-preview")
+        else:
+            model_name = openai_client_cfg.pop("model", "gpt-4-0125-preview")

        logger.warning("OpenAI client config of GPTAssistantAgent(%s) - model: %s", name, model_name)

        oai_wrapper = OpenAIWrapper(**openai_client_cfg)
        if len(oai_wrapper._clients) > 1:
@@ -143,11 +158,6 @@ def __init__(
                # Tools are specified but overwrite_tools is False; do not update the assistant's tools
                logger.warning("overwrite_tools is False. Using existing tools from assistant API.")

-        self._verbose = kwargs.pop("verbose", False)
-        super().__init__(
-            name=name, system_message=instructions, human_input_mode="NEVER", llm_config=llm_config, **kwargs
-        )
-
        # lazily create threads
        self._openai_threads = {}
        self._unread_index = defaultdict(int)
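To make the resolution order easier to see at a glance, here is a minimal standalone sketch of the logic added above. It is not part of the commit; the helper name resolve_model_name is hypothetical and only mirrors the behaviour of the new code, including its hard-coded default model.

import copy

def resolve_model_name(llm_config: dict) -> str:
    # Prefer the model named in the first config_list entry; otherwise fall back to a
    # top-level "model" key, and finally to the default used in the commit above.
    cfg = copy.deepcopy(llm_config)
    if cfg.get("config_list") is not None and len(cfg["config_list"]) > 0:
        return cfg["config_list"][0].pop("model", "gpt-4-0125-preview")
    return cfg.pop("model", "gpt-4-0125-preview")

# Both llm_config shapes now resolve to the same model name:
assert resolve_model_name({"config_list": [{"model": "gpt-4-1106-preview"}]}) == "gpt-4-1106-preview"
assert resolve_model_name({"model": "gpt-4-1106-preview"}) == "gpt-4-1106-preview"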
21 changes: 18 additions & 3 deletions test/agentchat/contrib/test_gpt_assistant.py
@@ -17,7 +17,21 @@

if not skip:
    openai_config_list = autogen.config_list_from_json(
-        OAI_CONFIG_LIST, file_location=KEY_LOC, filter_dict={"api_type": ["openai"]}
+        OAI_CONFIG_LIST,
+        file_location=KEY_LOC,
+        # The Retrieval tool requires at least gpt-3.5-turbo-1106 (newer versions are supported) or gpt-4-turbo-preview models.
+        # https://platform.openai.com/docs/models/overview
+        filter_dict={
+            "api_type": ["openai"],
+            "model": [
+                "gpt-4-turbo-preview",
+                "gpt-4-0125-preview",
+                "gpt-4-1106-preview",
+                "gpt-3.5-turbo",
+                "gpt-3.5-turbo-0125",
+                "gpt-3.5-turbo-1106",
+            ],
+        },
    )
    aoai_config_list = autogen.config_list_from_json(
        OAI_CONFIG_LIST,
@@ -41,7 +55,8 @@ def test_config_list() -> None:
)
def test_gpt_assistant_chat() -> None:
    for gpt_config in [openai_config_list, aoai_config_list]:
-        _test_gpt_assistant_chat(gpt_config)
+        _test_gpt_assistant_chat({"config_list": gpt_config})
+        _test_gpt_assistant_chat(gpt_config[0])


def _test_gpt_assistant_chat(gpt_config) -> None:
@@ -68,7 +83,7 @@ def ask_ossinsight(question: str) -> str:
    name = f"For test_gpt_assistant_chat {uuid.uuid4()}"
    analyst = GPTAssistantAgent(
        name=name,
-        llm_config={"tools": [{"type": "function", "function": ossinsight_api_schema}], "config_list": gpt_config},
+        llm_config={"tools": [{"type": "function", "function": ossinsight_api_schema}], **gpt_config},
        instructions="Hello, Open Source Project Analyst. You'll conduct comprehensive evaluations of open source projects or organizations on the GitHub platform",
    )
    try:
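As the updated test suggests, callers can now pass the agent configuration either wrapped in a config_list or as a single flat entry. The following is a hedged usage sketch, not code from the repository: the agent name and instructions string are illustrative, it assumes a valid OAI_CONFIG_LIST file is present, and constructing GPTAssistantAgent contacts the OpenAI Assistants API, so live credentials are required.

import autogen
from autogen.agentchat.contrib.gpt_assistant_agent import GPTAssistantAgent

# Load OpenAI entries from a local OAI_CONFIG_LIST file (path assumed for illustration).
config_list = autogen.config_list_from_json(
    "OAI_CONFIG_LIST", filter_dict={"api_type": ["openai"]}
)

# Shape 1: llm_config wrapping a config_list; the model is taken from the first entry.
assistant = GPTAssistantAgent(
    name="assistant",
    instructions="You are a helpful assistant.",
    llm_config={"config_list": config_list},
)

# Shape 2: a single flat config entry, e.g. {"model": ..., "api_key": ...}.
assistant = GPTAssistantAgent(
    name="assistant",
    instructions="You are a helpful assistant.",
    llm_config=config_list[0],
)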
