-
Notifications
You must be signed in to change notification settings - Fork 18
/
Copy pathsimple_chat.py
28 lines (21 loc) · 1.13 KB
/
simple_chat.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
# Copyright (c) 2023 - 2024, Owners of https://github.com/autogenhub
#
# SPDX-License-Identifier: Apache-2.0
#
# Portions derived from https://github.com/microsoft/autogen are under the MIT License.
# SPDX-License-Identifier: MIT
from autogen import ConversableAgent, UserProxyAgent, config_list_from_json
def main():
    """Run a minimal two-agent chat: an LLM assistant conversing with a human.

    The assistant sends the opening message; the conversation ends when the
    user types exit.
    """
    # Load LLM inference endpoints from an env variable or a file.
    # See https://ag2ai.github.io/ag2/docs/FAQ#set-your-api-endpoints
    # and OAI_CONFIG_LIST_sample.
    # For example, if you have created a OAI_CONFIG_LIST file in the current working directory, that file will be used.
    endpoints = config_list_from_json(env_or_file="OAI_CONFIG_LIST")

    # The LLM-backed agent that produces the assistant's replies.
    assistant = ConversableAgent("agent", llm_config={"config_list": endpoints})

    # The agent standing in for the human; code execution is disabled.
    user_proxy = UserProxyAgent("user", code_execution_config=False)

    # Let the assistant start the conversation. It will end when the user types exit.
    assistant.initiate_chat(user_proxy, message="How can I help you today?")
# Run the chat demo only when executed as a script, not when imported.
if __name__ == "__main__":
    main()