diff --git a/autogen/version.py b/autogen/version.py
index 7043c3b4c4b8..b243d3db22b9 100644
--- a/autogen/version.py
+++ b/autogen/version.py
@@ -1 +1 @@
-__version__ = "0.2.22"
+__version__ = "0.2.23"
diff --git a/samples/apps/websockets/README.md b/samples/apps/websockets/README.md
new file mode 100644
index 000000000000..4a9302a97a49
--- /dev/null
+++ b/samples/apps/websockets/README.md
@@ -0,0 +1,20 @@
+# Using websockets with FastAPI and AutoGen
+
+## Running the example
+
+1. Navigate to the directory containing the example:
+   ```
+   cd samples/apps/websockets
+   ```
+
+2. Install the necessary dependencies:
+   ```
+   ./setup.py
+   ```
+
+3. Run the application:
+   ```
+   uvicorn application:app --reload
+   ```
+
+You should now be able to access the application in your web browser at `http://localhost:8000`.
diff --git a/samples/apps/websockets/application.py b/samples/apps/websockets/application.py
new file mode 100755
index 000000000000..f2e453d92482
--- /dev/null
+++ b/samples/apps/websockets/application.py
@@ -0,0 +1,182 @@
+#!/usr/bin/env python
+
+import asyncio
+import logging
+import os
+from contextlib import asynccontextmanager  # noqa: E402
+from datetime import datetime
+from typing import AsyncIterator, Dict, Iterator, List
+
+import uvicorn  # noqa: E402
+from fastapi import FastAPI  # noqa: E402
+from fastapi.responses import HTMLResponse  # noqa: E402
+from websockets.sync.client import connect as ws_connect
+
+import autogen
+from autogen.io.websockets import IOWebsockets
+
+PORT = 8000
+
+# logger = getLogger(__name__)
+logger = logging.getLogger("uvicorn")
+
+
+def _get_config_list() -> List[Dict[str, str]]:
+    """Get a list of config dictionaries with API keys for OpenAI and Azure OpenAI.
+
+    Returns:
+        List[Dict[str, str]]: A list of config dictionaries with API keys.
+
+    Example:
+        >>> _get_config_list()
+        [
+            {
+                'model': 'gpt-35-turbo-16k',
+                'api_key': '0123456789abcdef0123456789abcdef',
+                'base_url': 'https://my-deployment.openai.azure.com/',
+                'api_type': 'azure',
+                'api_version': '2024-02-15-preview',
+            },
+            {
+                'model': 'gpt-4',
+                'api_key': '0123456789abcdef0123456789abcdef',
+            },
+        ]
+    """
+    # can use both OpenAI and Azure OpenAI API keys
+    config_list = [
+        {
+            "model": "gpt-35-turbo-16k",
+            "api_key": os.environ.get("AZURE_OPENAI_API_KEY"),
+            "base_url": os.environ.get("AZURE_OPENAI_BASE_URL"),
+            "api_type": "azure",
+            "api_version": os.environ.get("AZURE_OPENAI_API_VERSION"),
+        },
+        {
+            "model": "gpt-4",
+            "api_key": os.environ.get("OPENAI_API_KEY"),
+        },
+    ]
+    # filter out configs with no API key
+    config_list = [llm_config for llm_config in config_list if llm_config["api_key"] is not None]
+
+    if not config_list:
+        raise ValueError(
+            "No API keys found. Please set either AZURE_OPENAI_API_KEY or OPENAI_API_KEY environment variable."
+        )
+
+    return config_list
+
+
+def on_connect(iostream: IOWebsockets) -> None:
+    logger.info(f"on_connect(): Connected to client using IOWebsockets {iostream}")
+
+    logger.info("on_connect(): Receiving message from client.")
+
+    # get the initial message from the client
+    initial_msg = iostream.input()
+
+    # instantiate an agent named "chatbot"
+    agent = autogen.ConversableAgent(
+        name="chatbot",
+        system_message="Complete a task given to you and reply TERMINATE when the task is done. If asked about the weather, use tool weather_forecast(city) to get the weather forecast for a city.",
+        llm_config={
+            "config_list": _get_config_list(),
+            "stream": True,
+        },
+    )
+
+    # create a UserProxyAgent instance named "user_proxy"
+    user_proxy = autogen.UserProxyAgent(
+        name="user_proxy",
+        system_message="A proxy for the user.",
+        is_termination_msg=lambda x: x.get("content", "") and x.get("content", "").rstrip().endswith("TERMINATE"),
+        human_input_mode="NEVER",
+        max_consecutive_auto_reply=10,
+        code_execution_config=False,
+    )
+
+    # register the weather_forecast function
+    def weather_forecast(city: str) -> str:
+        return f"The weather forecast for {city} at {datetime.now()} is sunny."
+
+    autogen.register_function(
+        weather_forecast, caller=agent, executor=user_proxy, description="Weather forecast for a city"
+    )
+
+    # instantiate a chat
+    logger.info(
+        f"on_connect(): Initiating chat with the agent ({agent.name}) and the user proxy ({user_proxy.name}) using the message '{initial_msg}'",
+    )
+    user_proxy.initiate_chat(  # noqa: F704
+        agent,
+        message=initial_msg,
+    )
+
+    logger.info("on_connect(): Finished the task successfully.")
+
+
+html = """
+
+
+