From a38b4c892427dfa2f207383d1f036f582f9a8f9a Mon Sep 17 00:00:00 2001 From: Sameer Kankute Date: Mon, 16 Feb 2026 18:59:39 +0530 Subject: [PATCH 1/3] Add doc for OpenAI Agents SDK with LiteLLM --- .../my-website/docs/projects/openai-agents.md | 115 ++++++++++++++++-- docs/my-website/sidebars.js | 1 + 2 files changed, 108 insertions(+), 8 deletions(-) diff --git a/docs/my-website/docs/projects/openai-agents.md b/docs/my-website/docs/projects/openai-agents.md index 95a2191b883..960e8a77551 100644 --- a/docs/my-website/docs/projects/openai-agents.md +++ b/docs/my-website/docs/projects/openai-agents.md @@ -1,22 +1,121 @@ +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; -# OpenAI Agents SDK +# OpenAI Agents SDK with LiteLLM -The [OpenAI Agents SDK](https://github.com/openai/openai-agents-python) is a lightweight framework for building multi-agent workflows. -It includes an official LiteLLM extension that lets you use any of the 100+ supported providers (Anthropic, Gemini, Mistral, Bedrock, etc.) +Use OpenAI Agents SDK with any LLM provider through LiteLLM Proxy. + +The [OpenAI Agents SDK](https://github.com/openai/openai-agents-python) is a lightweight framework for building multi-agent workflows. It includes an official LiteLLM extension that lets you use any of the 100+ supported providers. + +## Quick Start + +### 1. Install Dependencies + +```bash +pip install "openai-agents[litellm]" +``` + +### 2. Add Model to Config + +```yaml title="config.yaml" +model_list: + - model_name: gpt-4o + litellm_params: + model: "openai/gpt-4o" + api_key: "os.environ/OPENAI_API_KEY" + + - model_name: claude-sonnet + litellm_params: + model: "anthropic/claude-3-5-sonnet-20241022" + api_key: "os.environ/ANTHROPIC_API_KEY" + + - model_name: gemini-pro + litellm_params: + model: "gemini/gemini-2.0-flash-exp" + api_key: "os.environ/GEMINI_API_KEY" +``` + +### 3. Start LiteLLM Proxy + +```bash +litellm --config config.yaml +``` + +### 4. 
Use with Proxy
+
+<Tabs>
+<TabItem value="proxy" label="LiteLLM Proxy">
+```python
+from agents import Agent, Runner
+from agents.extensions.models.litellm_model import LitellmModel
+
+# Point to LiteLLM proxy
+agent = Agent(
+    name="Assistant",
+    instructions="You are a helpful assistant.",
+    model=LitellmModel(
+        model="claude-sonnet",  # Model from config.yaml
+        api_key="sk-1234",  # LiteLLM API key
+        base_url="http://localhost:4000"
+    )
+)
+
+result = Runner.run_sync(agent, "What is LiteLLM?")
+print(result.final_output)
+```
+</TabItem>
+<TabItem value="sdk" label="LiteLLM SDK">
 
 ```python
 from agents import Agent, Runner
 from agents.extensions.models.litellm_model import LitellmModel
 
+# Use any provider directly
 agent = Agent(
     name="Assistant",
     instructions="You are a helpful assistant.",
-    model=LitellmModel(model="provider/model-name")
+    model=LitellmModel(
+        model="anthropic/claude-3-5-sonnet-20241022",
+        api_key="your-anthropic-key"
+    )
 )
 
-result = Runner.run_sync(agent, "your_prompt_here")
-print("Result:", result.final_output)
+result = Runner.run_sync(agent, "What is LiteLLM?")
+print(result.final_output)
 ```
 
-- [GitHub](https://github.com/openai/openai-agents-python)
-- [LiteLLM Extension Docs](https://openai.github.io/openai-agents-python/ref/extensions/litellm/)
+</TabItem>
+</Tabs>
+
+## Track Usage
+
+Enable usage tracking to monitor token consumption:
+
+```python
+from agents import Agent, ModelSettings
+from agents.extensions.models.litellm_model import LitellmModel
+
+agent = Agent(
+    name="Assistant",
+    model=LitellmModel(model="claude-sonnet", api_key="sk-1234"),
+    model_settings=ModelSettings(include_usage=True)
+)
+
+result = Runner.run_sync(agent, "Hello")
+print(result.context_wrapper.usage)  # Token counts
+```
+
+## Environment Variables
+
+| Variable | Value | Description |
+|----------|-------|-------------|
+| `LITELLM_BASE_URL` | `http://localhost:4000` | LiteLLM proxy URL |
+| `LITELLM_API_KEY` | `sk-1234` | Your LiteLLM API key |
+
+## Related Resources
+
+- [OpenAI Agents SDK Documentation](https://openai.github.io/openai-agents-python/)
+- 
[LiteLLM Extension Docs](https://openai.github.io/openai-agents-python/models/litellm/) +- [LiteLLM Proxy Quick Start](../proxy/quick_start) diff --git a/docs/my-website/sidebars.js b/docs/my-website/sidebars.js index 4efb2475755..5e82fa793b5 100644 --- a/docs/my-website/sidebars.js +++ b/docs/my-website/sidebars.js @@ -176,6 +176,7 @@ const sidebars = { "tutorials/copilotkit_sdk", "tutorials/google_adk", "tutorials/livekit_xai_realtime", + "litellm/docs/my-website/docs/projects/openai-agents.md" ] }, From 5b6e232da631dea63dc823aa90fd3ea4d549932b Mon Sep 17 00:00:00 2001 From: Sameer Kankute Date: Mon, 16 Feb 2026 19:00:53 +0530 Subject: [PATCH 2/3] Add doc for OpenAI Agents SDK with LiteLLM --- docs/my-website/docs/projects/openai-agents.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/my-website/docs/projects/openai-agents.md b/docs/my-website/docs/projects/openai-agents.md index 960e8a77551..86983e7e510 100644 --- a/docs/my-website/docs/projects/openai-agents.md +++ b/docs/my-website/docs/projects/openai-agents.md @@ -1,7 +1,7 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; -# OpenAI Agents SDK with LiteLLM +# OpenAI Agents SDK Use OpenAI Agents SDK with any LLM provider through LiteLLM Proxy. 
From be9df253dd4538c060049188f2ad0cc6de5ac4ad Mon Sep 17 00:00:00 2001 From: Sameer Kankute Date: Mon, 16 Feb 2026 19:03:15 +0530 Subject: [PATCH 3/3] Update docs/my-website/sidebars.js Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com> --- docs/my-website/sidebars.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/my-website/sidebars.js b/docs/my-website/sidebars.js index 5e82fa793b5..42996d1a3e9 100644 --- a/docs/my-website/sidebars.js +++ b/docs/my-website/sidebars.js @@ -176,7 +176,7 @@ const sidebars = { "tutorials/copilotkit_sdk", "tutorials/google_adk", "tutorials/livekit_xai_realtime", - "litellm/docs/my-website/docs/projects/openai-agents.md" + "projects/openai-agents" ] },