diff --git a/docs/my-website/docs/proxy/high_availability_control_plane.md b/docs/my-website/docs/proxy/high_availability_control_plane.md
new file mode 100644
index 00000000000..324fba3a180
--- /dev/null
+++ b/docs/my-website/docs/proxy/high_availability_control_plane.md
@@ -0,0 +1,190 @@
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+import { ControlPlaneArchitecture } from '@site/src/components/ControlPlaneArchitecture';
+
+# [BETA] High Availability Control Plane
+
+Deploy a single LiteLLM UI that manages multiple independent LiteLLM proxy instances, each with its own database, Redis, and master key.
+
+:::info
+
+This is an Enterprise feature.
+
+[Enterprise Pricing](https://www.litellm.ai/#pricing)
+
+[Get free 7-day trial key](https://www.litellm.ai/enterprise#trial)
+
+:::
+
+## Why This Architecture?
+
+In the [standard multi-region setup](./control_plane_and_data_plane.md), all instances share a single database and master key. This works, but introduces a shared dependency. If the database goes down, every instance is affected.
+
+The **High Availability Control Plane** takes a different approach:
+
+| | Shared Database (Standard) | High Availability Control Plane |
+|---|---|---|
+| **Database** | Single shared DB for all instances | Each instance has its own DB |
+| **Redis** | Shared Redis | Each instance has its own Redis |
+| **Master Key** | Same key across all instances | Each instance has its own key |
+| **Failure isolation** | DB outage affects all instances | Failure is isolated to one instance |
+| **User management** | Centralized, one user table | Independent, each worker manages its own users |
+| **UI** | One UI per admin instance | Single control plane UI manages all workers |
+
+### Benefits
+
+- **True high availability**: no shared infrastructure means no single point of failure
+- **Blast radius containment**: a misconfiguration or outage on one worker doesn't affect others
+- **Regional isolation**: workers can run in different regions with data residency requirements
+- **Simpler operations**: each worker is a self-contained LiteLLM deployment
+
+## Architecture
+
+<ControlPlaneArchitecture />
+
+The **control plane** is a LiteLLM instance that serves the admin UI and knows about all the workers. It does not proxy LLM requests; it is purely for administration.
+
+Each **worker** is a fully independent LiteLLM proxy that handles LLM requests for its region or team. Workers have their own users, keys, teams, and budgets.
+
+## Setup
+
+### 1. Control Plane Configuration
+
+The control plane needs a `worker_registry` that lists all worker instances.
+
+```yaml title="cp_config.yaml"
+model_list: []
+
+general_settings:
+ master_key: sk-1234
+ database_url: os.environ/DATABASE_URL
+
+worker_registry:
+ - worker_id: "worker-a"
+ name: "Worker A"
+ url: "http://localhost:4001"
+ - worker_id: "worker-b"
+ name: "Worker B"
+ url: "http://localhost:4002"
+```
+
+Start the control plane:
+
+```bash
+litellm --config cp_config.yaml --port 4000
+```
+
+### 2. Worker Configuration
+
+Each worker needs `control_plane_url` in its `general_settings` to enable cross-origin authentication from the control plane UI.
+
+`PROXY_BASE_URL` must also be set for each worker so that SSO callback redirects resolve correctly.
+
+<Tabs>
+
+<TabItem value="worker-a" label="Worker A">
+```yaml title="worker_a_config.yaml"
+model_list: []
+
+general_settings:
+ master_key: sk-worker-a-1234
+ database_url: os.environ/WORKER_A_DATABASE_URL
+ control_plane_url: "http://localhost:4000"
+```
+
+```bash
+PROXY_BASE_URL=http://localhost:4001 litellm --config worker_a_config.yaml --port 4001
+```
+
+</TabItem>
+
+<TabItem value="worker-b" label="Worker B">
+```yaml title="worker_b_config.yaml"
+model_list: []
+
+general_settings:
+ master_key: sk-worker-b-1234
+ database_url: os.environ/WORKER_B_DATABASE_URL
+ control_plane_url: "http://localhost:4000"
+```
+
+```bash
+PROXY_BASE_URL=http://localhost:4002 litellm --config worker_b_config.yaml --port 4002
+```
+
+</TabItem>
+
+</Tabs>
+:::important
+Each worker must have its own `master_key` and `database_url`. The whole point of this architecture is that workers are independent.
+:::
+
+### 3. SSO Configuration (Optional)
+
+SSO is configured on the **control plane** instance the same way as a standard LiteLLM proxy. See the [SSO setup guide](./admin_ui_sso.md) for full instructions.
+
+If using SSO, make sure to register each worker URL and the control plane URL as allowed callback URLs in your SSO provider's dashboard.
+
+## How It Works
+
+### Login Flow
+
+1. User visits the control plane UI (`http://localhost:4000/ui`)
+2. The login page shows a **worker selector** dropdown listing all registered workers
+3. User selects a worker (e.g. "Worker A") and logs in with username/password or SSO
+4. The UI authenticates against the **selected worker** using the `/v3/login` endpoint
+5. On success, the UI stores the worker's JWT and points all subsequent API calls at the worker
+6. The user can now manage keys, teams, models, and budgets on that worker, all from the control plane UI
+
+### Switching Workers
+
+Once logged in, users can switch workers from the **navbar dropdown** without leaving the UI. Switching redirects back to the login page to authenticate against the new worker.
+
+### Discovery
+
+The control plane exposes a `/.well-known/litellm-ui-config` endpoint that the UI reads on load. This endpoint returns:
+- `is_control_plane: true`
+- The list of workers with their IDs, names, and URLs
+
+This is how the login page knows to show the worker selector.
+
+## Local Testing
+
+To try this out locally, start each instance in a separate terminal:
+
+```bash
+# Terminal 1: Control Plane
+litellm --config cp_config.yaml --port 4000
+
+# Terminal 2: Worker A
+PROXY_BASE_URL=http://localhost:4001 litellm --config worker_a_config.yaml --port 4001
+
+# Terminal 3: Worker B
+PROXY_BASE_URL=http://localhost:4002 litellm --config worker_b_config.yaml --port 4002
+```
+
+Then open `http://localhost:4000/ui`. You should see the worker selector on the login page.
+
+## Configuration Reference
+
+### Control Plane Settings
+
+| Field | Location | Description |
+|---|---|---|
+| `worker_registry` | Top-level config | List of worker instances |
+| `worker_registry[].worker_id` | Required | Unique identifier for the worker |
+| `worker_registry[].name` | Required | Display name shown in the UI |
+| `worker_registry[].url` | Required | Full URL of the worker instance |
+
+### Worker Settings
+
+| Field | Location | Description |
+|---|---|---|
+| `general_settings.control_plane_url` | Required | URL of the control plane instance. Enables `/v3/login` and `/v3/login/exchange` endpoints on this worker. |
+| `PROXY_BASE_URL` | Environment variable | The worker's own external URL. Required for SSO callback redirects. |
+
+## Related Documentation
+
+- [Standard Multi-Region Setup](./control_plane_and_data_plane.md) - shared-database architecture for admin/worker split
+- [SSO Setup](./admin_ui_sso.md) - configuring SSO for the admin UI
+- [Production Deployment](./prod.md) - production best practices
diff --git a/docs/my-website/sidebars.js b/docs/my-website/sidebars.js
index 4c0471fb8f4..888ec3f5f69 100644
--- a/docs/my-website/sidebars.js
+++ b/docs/my-website/sidebars.js
@@ -430,6 +430,7 @@ const sidebars = {
"proxy/architecture",
"proxy/multi_tenant_architecture",
"proxy/control_plane_and_data_plane",
+ "proxy/high_availability_control_plane",
"proxy/db_deadlocks",
"proxy/db_info",
"proxy/image_handling",
diff --git a/docs/my-website/src/components/ControlPlaneArchitecture/ControlPlaneArchitecture.tsx b/docs/my-website/src/components/ControlPlaneArchitecture/ControlPlaneArchitecture.tsx
new file mode 100644
index 00000000000..5cc41979b02
--- /dev/null
+++ b/docs/my-website/src/components/ControlPlaneArchitecture/ControlPlaneArchitecture.tsx
@@ -0,0 +1,95 @@
+import React from 'react';
+import styles from './styles.module.css';
+
+/* ────────────────────── Shared small pieces ────────────────────── */
+
+function InfraChip({ color, label }: { color: string; label: string }) {
+ const dotClass =
+ color === 'green'
+ ? styles.infraDotGreen
+ : color === 'blue'
+ ? styles.infraDotBlue
+ : styles.infraDotOrange;
+
+  return (
+    <span className={styles.infraChip}>
+      <span className={`${styles.infraDot} ${dotClass}`} />
+      {label}
+    </span>
+  );
+}
+
+/* ────────────────────── Architecture tab ────────────────────── */
+
+function ArchitectureView() {
+ return (
+
+ {/* User */}
+
+
+
+
+ {/* Control Plane */}
+
+
+ Control Plane
+ UI
+
+
cp.example.com
+
+
+
+
+
+
+
+ {/* Branch connector */}
+
+
+ {/* Workers */}
+
+
+
+ Worker A
+ US East
+
+
worker-a.example.com
+
+
+
+
+
+
+
+
+
+ Worker B
+ EU West
+
+
worker-b.example.com
+
+
+
+
+
+
+
+
+ );
+}
+
+/* ────────────────────── Main component ────────────────────── */
+
+export default function ControlPlaneArchitecture() {
+  return (
+    <ArchitectureView />
+  );
+}
diff --git a/docs/my-website/src/components/ControlPlaneArchitecture/index.tsx b/docs/my-website/src/components/ControlPlaneArchitecture/index.tsx
new file mode 100644
index 00000000000..826b4d68818
--- /dev/null
+++ b/docs/my-website/src/components/ControlPlaneArchitecture/index.tsx
@@ -0,0 +1 @@
+export { default as ControlPlaneArchitecture } from './ControlPlaneArchitecture';
diff --git a/docs/my-website/src/components/ControlPlaneArchitecture/styles.module.css b/docs/my-website/src/components/ControlPlaneArchitecture/styles.module.css
new file mode 100644
index 00000000000..8400a8920e0
--- /dev/null
+++ b/docs/my-website/src/components/ControlPlaneArchitecture/styles.module.css
@@ -0,0 +1,517 @@
+/* ── Custom properties ── */
+:root {
+ --cp-bg: #ffffff;
+ --cp-border: #e5e7eb;
+ --cp-text: #1a1a2e;
+ --cp-text-secondary: #6b7280;
+ --cp-text-muted: #9ca3af;
+ --cp-accent: #3b82f6;
+ --cp-accent-light: #dbeafe;
+ --cp-accent-glow: rgba(59, 130, 246, 0.15);
+ --cp-green: #10b981;
+ --cp-green-light: #d1fae5;
+ --cp-green-glow: rgba(16, 185, 129, 0.15);
+ --cp-orange: #f59e0b;
+ --cp-orange-light: #fef3c7;
+ --cp-purple: #8b5cf6;
+ --cp-purple-light: #ede9fe;
+ --cp-red: #ef4444;
+ --cp-red-light: #fee2e2;
+ --cp-card-bg: #f9fafb;
+ --cp-infra-bg: #f1f5f9;
+ --cp-infra-border: #cbd5e1;
+ --cp-connector: #d1d5db;
+ --cp-dot-size: 8px;
+}
+
+[data-theme='dark'] {
+ --cp-bg: #111827;
+ --cp-border: #374151;
+ --cp-text: #e5e7eb;
+ --cp-text-secondary: #9ca3af;
+ --cp-text-muted: #6b7280;
+ --cp-accent: #60a5fa;
+ --cp-accent-light: #1e3a5f;
+ --cp-accent-glow: rgba(96, 165, 250, 0.2);
+ --cp-green: #34d399;
+ --cp-green-light: #064e3b;
+ --cp-green-glow: rgba(52, 211, 153, 0.2);
+ --cp-orange: #fbbf24;
+ --cp-orange-light: #78350f;
+ --cp-purple: #a78bfa;
+ --cp-purple-light: #3b0764;
+ --cp-red: #f87171;
+ --cp-red-light: #451a1a;
+ --cp-card-bg: #1f2937;
+ --cp-infra-bg: #1e293b;
+ --cp-infra-border: #475569;
+ --cp-connector: #4b5563;
+}
+
+/* ── Wrapper ── */
+.wrapper {
+ margin: 1.5rem 0;
+ font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
+}
+
+/* ── Tab bar ── */
+.tabs {
+ display: flex;
+ gap: 0;
+ margin-bottom: 1.5rem;
+ border-bottom: 2px solid var(--cp-border);
+}
+
+.tab {
+ padding: 0.6rem 1.25rem;
+ font-size: 0.85rem;
+ font-weight: 600;
+ color: var(--cp-text-secondary);
+ background: none;
+ border: none;
+ border-bottom: 2px solid transparent;
+ margin-bottom: -2px;
+ cursor: pointer;
+ transition: color 0.2s, border-color 0.2s;
+}
+
+.tab:hover {
+ color: var(--cp-text);
+}
+
+.tabActive {
+ color: var(--cp-accent);
+ border-bottom-color: var(--cp-accent);
+}
+
+/* ── Architecture diagram ── */
+.diagram {
+ display: flex;
+ flex-direction: column;
+ align-items: center;
+ gap: 0;
+}
+
+/* ── User icon ── */
+.userRow {
+ display: flex;
+ flex-direction: column;
+ align-items: center;
+ margin-bottom: 0.5rem;
+}
+
+.userIcon {
+ width: 40px;
+ height: 40px;
+ border-radius: 50%;
+ background: var(--cp-accent-light);
+ border: 2px solid var(--cp-accent);
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ font-size: 1.1rem;
+}
+
+.userLabel {
+ font-size: 0.75rem;
+ color: var(--cp-text-secondary);
+ margin-top: 0.3rem;
+ font-weight: 500;
+}
+
+/* ── Connectors ── */
+.connectorDown {
+ width: 2px;
+ height: 28px;
+ background: var(--cp-connector);
+ position: relative;
+}
+
+.connectorDown::after {
+ content: '';
+ position: absolute;
+ bottom: -4px;
+ left: 50%;
+ transform: translateX(-50%);
+ width: 0;
+ height: 0;
+ border-left: 5px solid transparent;
+ border-right: 5px solid transparent;
+ border-top: 5px solid var(--cp-connector);
+}
+
+.connectorBranch {
+ display: flex;
+ align-items: flex-start;
+ justify-content: center;
+ position: relative;
+ width: 100%;
+ max-width: 700px;
+ height: 36px;
+}
+
+.connectorBranch::before {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: 50%;
+ width: 2px;
+ height: 12px;
+ background: var(--cp-connector);
+ transform: translateX(-50%);
+}
+
+.connectorBranch::after {
+ content: '';
+ position: absolute;
+ top: 12px;
+ left: calc(25% + 12px);
+ right: calc(25% + 12px);
+ height: 2px;
+ background: var(--cp-connector);
+}
+
+.branchLeg {
+ position: absolute;
+ top: 12px;
+ width: 2px;
+ height: 24px;
+ background: var(--cp-connector);
+}
+
+.branchLeg::after {
+ content: '';
+ position: absolute;
+ bottom: -4px;
+ left: 50%;
+ transform: translateX(-50%);
+ width: 0;
+ height: 0;
+ border-left: 5px solid transparent;
+ border-right: 5px solid transparent;
+ border-top: 5px solid var(--cp-connector);
+}
+
+.branchLegLeft {
+ left: calc(25% + 12px);
+}
+
+.branchLegRight {
+ right: calc(25% + 12px);
+}
+
+/* ── Node cards ── */
+.node {
+ border: 2px solid var(--cp-border);
+ border-radius: 12px;
+ background: var(--cp-card-bg);
+ padding: 1rem 1.25rem;
+ text-align: center;
+ transition: border-color 0.3s, box-shadow 0.3s;
+ position: relative;
+}
+
+.nodeControlPlane {
+ border-color: var(--cp-accent);
+ box-shadow: 0 0 0 3px var(--cp-accent-glow);
+ min-width: 280px;
+}
+
+.nodeWorker {
+ min-width: 220px;
+}
+
+.nodeWorkerA {
+ border-color: var(--cp-green);
+ box-shadow: 0 0 0 3px var(--cp-green-glow);
+}
+
+.nodeWorkerB {
+ border-color: var(--cp-purple);
+ box-shadow: 0 0 0 3px rgba(139, 92, 246, 0.15);
+}
+
+[data-theme='dark'] .nodeWorkerB {
+ box-shadow: 0 0 0 3px rgba(167, 139, 250, 0.2);
+}
+
+.nodeHeader {
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ gap: 0.5rem;
+ margin-bottom: 0.5rem;
+}
+
+.nodeIcon {
+ font-size: 1.1rem;
+}
+
+.nodeTitle {
+ font-size: 0.95rem;
+ font-weight: 700;
+ color: var(--cp-text);
+}
+
+.nodeSubtitle {
+ font-size: 0.75rem;
+ color: var(--cp-text-secondary);
+ margin-bottom: 0.75rem;
+}
+
+.badge {
+ display: inline-block;
+ font-size: 0.65rem;
+ font-weight: 600;
+ padding: 0.15rem 0.5rem;
+ border-radius: 9999px;
+ text-transform: uppercase;
+ letter-spacing: 0.04em;
+}
+
+.badgeBlue {
+ background: var(--cp-accent-light);
+ color: var(--cp-accent);
+}
+
+.badgeGreen {
+ background: var(--cp-green-light);
+ color: var(--cp-green);
+}
+
+.badgePurple {
+ background: var(--cp-purple-light);
+ color: var(--cp-purple);
+}
+
+/* ── Infrastructure chips ── */
+.infraRow {
+ display: flex;
+ gap: 0.4rem;
+ justify-content: center;
+ flex-wrap: wrap;
+ margin-top: 0.5rem;
+}
+
+.infraChip {
+ display: flex;
+ align-items: center;
+ gap: 0.3rem;
+ font-size: 0.7rem;
+ font-weight: 500;
+ color: var(--cp-text-secondary);
+ background: var(--cp-infra-bg);
+ border: 1px solid var(--cp-infra-border);
+ border-radius: 6px;
+ padding: 0.2rem 0.5rem;
+}
+
+.infraDot {
+ width: 6px;
+ height: 6px;
+ border-radius: 50%;
+ flex-shrink: 0;
+}
+
+.infraDotGreen {
+ background: var(--cp-green);
+}
+
+.infraDotBlue {
+ background: var(--cp-accent);
+}
+
+.infraDotOrange {
+ background: var(--cp-orange);
+}
+
+/* ── Workers row ── */
+.workersRow {
+ display: flex;
+ gap: 2rem;
+ justify-content: center;
+ flex-wrap: wrap;
+}
+
+/* ── Animated flow ── */
+.flowLabel {
+ font-size: 0.7rem;
+ color: var(--cp-accent);
+ font-weight: 600;
+ position: absolute;
+ white-space: nowrap;
+}
+
+/* ── Comparison view ── */
+.comparisonGrid {
+ display: grid;
+ grid-template-columns: 1fr 1fr;
+ gap: 1.5rem;
+ margin-top: 0.5rem;
+}
+
+.comparisonColumn {
+ border: 2px solid var(--cp-border);
+ border-radius: 12px;
+ padding: 1.25rem;
+ background: var(--cp-card-bg);
+}
+
+.comparisonColumnOld {
+ border-color: var(--cp-red);
+}
+
+.comparisonColumnNew {
+ border-color: var(--cp-green);
+}
+
+.comparisonTitle {
+ font-size: 0.9rem;
+ font-weight: 700;
+ color: var(--cp-text);
+ text-align: center;
+ margin-bottom: 1rem;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ gap: 0.4rem;
+}
+
+.comparisonTitleOld {
+ color: var(--cp-red);
+}
+
+.comparisonTitleNew {
+ color: var(--cp-green);
+}
+
+/* ── Mini diagram inside comparison ── */
+.miniDiagram {
+ display: flex;
+ flex-direction: column;
+ align-items: center;
+ gap: 0.5rem;
+}
+
+.miniNode {
+ border: 1.5px solid var(--cp-border);
+ border-radius: 8px;
+ background: var(--cp-bg);
+ padding: 0.5rem 0.75rem;
+ text-align: center;
+ font-size: 0.75rem;
+ font-weight: 600;
+ color: var(--cp-text);
+ width: 100%;
+ max-width: 180px;
+}
+
+.miniNodeHighlight {
+ border-color: var(--cp-accent);
+ background: var(--cp-accent-light);
+}
+
+.miniNodeDanger {
+ border-color: var(--cp-red);
+ background: var(--cp-red-light);
+}
+
+.miniNodeSuccess {
+ border-color: var(--cp-green);
+ background: var(--cp-green-light);
+}
+
+.miniConnector {
+ width: 1.5px;
+ height: 16px;
+ background: var(--cp-connector);
+}
+
+.miniWorkersRow {
+ display: flex;
+ gap: 0.5rem;
+ justify-content: center;
+ width: 100%;
+}
+
+.miniWorkerStack {
+ display: flex;
+ flex-direction: column;
+ align-items: center;
+ gap: 0.3rem;
+ flex: 1;
+ max-width: 140px;
+}
+
+.miniInfra {
+ font-size: 0.65rem;
+ color: var(--cp-text-muted);
+ font-weight: 500;
+}
+
+.miniInfraShared {
+ color: var(--cp-red);
+ font-weight: 600;
+}
+
+.miniInfraOwn {
+ color: var(--cp-green);
+ font-weight: 600;
+}
+
+/* ── Callout box ── */
+.callout {
+ display: flex;
+ align-items: flex-start;
+ gap: 0.6rem;
+ padding: 0.75rem 1rem;
+ border-radius: 8px;
+ margin-top: 1rem;
+ font-size: 0.8rem;
+ color: var(--cp-text);
+ line-height: 1.5;
+}
+
+.calloutDanger {
+ background: var(--cp-red-light);
+ border: 1px solid var(--cp-red);
+}
+
+.calloutSuccess {
+ background: var(--cp-green-light);
+ border: 1px solid var(--cp-green);
+}
+
+.calloutIcon {
+ font-size: 1rem;
+ flex-shrink: 0;
+ margin-top: 0.1rem;
+}
+
+/* ── Responsive ── */
+@media (max-width: 768px) {
+ .comparisonGrid {
+ grid-template-columns: 1fr;
+ }
+
+ .workersRow {
+ flex-direction: column;
+ align-items: center;
+ }
+
+ .nodeControlPlane {
+ min-width: auto;
+ width: 100%;
+ max-width: 300px;
+ }
+
+ .nodeWorker {
+ min-width: auto;
+ width: 100%;
+ max-width: 260px;
+ }
+
+ .connectorBranch {
+ display: none;
+ }
+}
diff --git a/litellm/__init__.py b/litellm/__init__.py
index 7f72e0b0e89..e45d926e8db 100644
--- a/litellm/__init__.py
+++ b/litellm/__init__.py
@@ -97,6 +97,7 @@
success_callback: List[CALLBACK_TYPES] = []
failure_callback: List[CALLBACK_TYPES] = []
service_callback: List[CALLBACK_TYPES] = []
+audit_log_callbacks: List[CALLBACK_TYPES] = []
# logging_callback_manager is lazy-loaded via __getattr__
_custom_logger_compatible_callbacks_literal = Literal[
"lago",
diff --git a/litellm/_logging.py b/litellm/_logging.py
index 18c3bcb7e87..65e6045b0b1 100644
--- a/litellm/_logging.py
+++ b/litellm/_logging.py
@@ -52,6 +52,17 @@ def _build_secret_patterns() -> re.Pattern:
r"(?<=://)[^\s'\"]*:[^\s'\"@]+(?=@)",
# Databricks personal access tokens
r"dapi[0-9a-f]{32}",
+ # ── Key-name-based redaction ──
+ # Catches secrets inside dicts/config dumps by matching on the KEY name
+ # regardless of what the value looks like.
+ # e.g. 'master_key': 'any-value-here', "database_url": "postgres://..."
+ r"(?:master_key|database_url|db_url|connection_string|"
+ r"private_key|signing_key|encryption_key|"
+ r"auth_token|access_token|refresh_token|"
+ r"slack_webhook_url|webhook_url|"
+ r"database_connection_string|"
+ r"huggingface_token|jwt_secret)"
+ r"""['\"]?\s*[:=]\s*['\"]?[^\s,'\"})\]{}>]+""",
]
return re.compile("|".join(patterns), re.IGNORECASE)
@@ -272,7 +283,7 @@ def async_json_exception_handler(loop, context):
verbose_router_logger = logging.getLogger("LiteLLM Router")
verbose_logger = logging.getLogger("LiteLLM")
-# Add the handler to the logger
+# Add the handler to the loggers
verbose_router_logger.addHandler(handler)
verbose_proxy_logger.addHandler(handler)
verbose_logger.addHandler(handler)
diff --git a/litellm/integrations/custom_logger.py b/litellm/integrations/custom_logger.py
index 06ba9675ca2..cccabf53e51 100644
--- a/litellm/integrations/custom_logger.py
+++ b/litellm/integrations/custom_logger.py
@@ -27,6 +27,7 @@
LLMResponseTypes,
ModelResponse,
ModelResponseStream,
+ StandardAuditLogPayload,
StandardCallbackDynamicParams,
StandardLoggingPayload,
)
@@ -177,6 +178,10 @@ async def async_log_success_event(self, kwargs, response_obj, start_time, end_ti
async def async_log_failure_event(self, kwargs, response_obj, start_time, end_time):
pass
+ async def async_log_audit_log_event(self, audit_log: "StandardAuditLogPayload"):
+ """Called when an audit log is created. Override in subclasses to handle."""
+ pass
+
#### PROMPT MANAGEMENT HOOKS ####
async def async_get_chat_completion_prompt(
diff --git a/litellm/integrations/s3_v2.py b/litellm/integrations/s3_v2.py
index c8db4be7cea..405bf9698cc 100644
--- a/litellm/integrations/s3_v2.py
+++ b/litellm/integrations/s3_v2.py
@@ -22,7 +22,7 @@
httpxSpecialProvider,
)
from litellm.types.integrations.s3_v2 import s3BatchLoggingElement
-from litellm.types.utils import StandardLoggingPayload
+from litellm.types.utils import StandardAuditLogPayload, StandardLoggingPayload
from .custom_batch_logger import CustomBatchLogger
@@ -248,6 +248,38 @@ async def async_log_failure_event(self, kwargs, response_obj, start_time, end_ti
)
pass
+ async def async_log_audit_log_event(
+ self, audit_log: StandardAuditLogPayload
+ ) -> None:
+ """Batch audit logs and upload to S3 under audit_logs/ prefix."""
+ try:
+ from datetime import timezone
+
+ now = datetime.now(timezone.utc)
+ audit_log_id = audit_log.get("id", "unknown")
+
+ s3_path = cast(Optional[str], self.s3_path) or ""
+ s3_path = s3_path.rstrip("/") + "/" if s3_path else ""
+
+ s3_object_key = (
+ f"{s3_path}audit_logs/"
+ f"{now.strftime('%Y-%m-%d')}/"
+ f"{now.strftime('%H-%M-%S')}_{audit_log_id}.json"
+ )
+
+ element = s3BatchLoggingElement(
+ payload=dict(audit_log),
+ s3_object_key=s3_object_key,
+ s3_object_download_filename=f"audit-{audit_log_id}.json",
+ )
+
+ self.log_queue.append(element)
+
+ if len(self.log_queue) >= self.batch_size:
+ await self.flush_queue()
+ except Exception as e:
+ verbose_logger.exception("S3 audit log error: %s", e)
+
async def _async_log_event_base(self, kwargs, response_obj, start_time, end_time):
try:
verbose_logger.debug(
diff --git a/litellm/proxy/management_helpers/audit_logs.py b/litellm/proxy/management_helpers/audit_logs.py
index ea082f468ae..b9020222f1f 100644
--- a/litellm/proxy/management_helpers/audit_logs.py
+++ b/litellm/proxy/management_helpers/audit_logs.py
@@ -2,12 +2,15 @@
Functions to create audit logs for LiteLLM Proxy
"""
+import asyncio
import json
-from litellm._uuid import uuid
from datetime import datetime, timezone
+from typing import Dict
import litellm
from litellm._logging import verbose_proxy_logger
+from litellm._uuid import uuid
+from litellm.integrations.custom_logger import CustomLogger
from litellm.proxy._types import (
AUDIT_ACTIONS,
LiteLLM_AuditLogs,
@@ -15,6 +18,93 @@
Optional,
UserAPIKeyAuth,
)
+from litellm.types.utils import StandardAuditLogPayload
+
+_audit_log_callback_cache: Dict[str, CustomLogger] = {}
+
+
+def _resolve_audit_log_callback(name: str) -> Optional[CustomLogger]:
+ """Resolve a string callback name to a CustomLogger instance, with caching."""
+ if name in _audit_log_callback_cache:
+ return _audit_log_callback_cache[name]
+
+ from litellm.litellm_core_utils.litellm_logging import (
+ _init_custom_logger_compatible_class,
+ )
+
+ instance = _init_custom_logger_compatible_class(
+ logging_integration=name, # type: ignore
+ internal_usage_cache=None,
+ llm_router=None,
+ )
+
+ if instance is not None:
+ _audit_log_callback_cache[name] = instance
+ return instance
+
+
+def _build_audit_log_payload(
+ request_data: LiteLLM_AuditLogs,
+) -> StandardAuditLogPayload:
+ """Convert LiteLLM_AuditLogs to StandardAuditLogPayload for callback dispatch."""
+ updated_at = ""
+ if request_data.updated_at is not None:
+ updated_at = request_data.updated_at.isoformat()
+
+ table_name_str: str = request_data.table_name.value if isinstance(request_data.table_name, LitellmTableNames) else str(request_data.table_name)
+
+ return StandardAuditLogPayload(
+ id=request_data.id,
+ updated_at=updated_at,
+ changed_by=request_data.changed_by or "",
+ changed_by_api_key=request_data.changed_by_api_key or "",
+ action=request_data.action,
+ table_name=table_name_str,
+ object_id=request_data.object_id,
+ before_value=request_data.before_value,
+ updated_values=request_data.updated_values,
+ )
+
+
+def _audit_log_task_done_callback(task: asyncio.Task) -> None:
+ """Log exceptions from audit log callback tasks so they don't slip through silently."""
+ try:
+ exc = task.exception()
+ except asyncio.CancelledError:
+ return
+ if exc is not None:
+ verbose_proxy_logger.error(
+ "Audit log callback task failed: %s", exc, exc_info=exc
+ )
+
+
+async def _dispatch_audit_log_to_callbacks(
+ request_data: LiteLLM_AuditLogs,
+) -> None:
+ """Dispatch audit log to all registered audit_log_callbacks."""
+ if not litellm.audit_log_callbacks:
+ return
+
+ payload = _build_audit_log_payload(request_data)
+
+ for callback in litellm.audit_log_callbacks:
+ try:
+ resolved: Optional[CustomLogger] = callback if isinstance(callback, CustomLogger) else None
+ if isinstance(callback, str):
+ resolved = _resolve_audit_log_callback(callback)
+ if resolved is None:
+ verbose_proxy_logger.warning(
+ "Could not resolve audit log callback: %s", callback
+ )
+ continue
+
+ if isinstance(resolved, CustomLogger):
+ task = asyncio.create_task(resolved.async_log_audit_log_event(payload))
+ task.add_done_callback(_audit_log_task_done_callback)
+ except Exception as e:
+ verbose_proxy_logger.error(
+ "Failed dispatching audit log to callback: %s", e
+ )
async def create_object_audit_log(
@@ -40,20 +130,24 @@ async def create_object_audit_log(
"""
from litellm.secret_managers.main import get_secret_bool
- store_audit_logs = litellm.store_audit_logs or get_secret_bool(
+ _store_audit_logs: Optional[bool] = litellm.store_audit_logs or get_secret_bool(
"LITELLM_STORE_AUDIT_LOGS"
)
- if store_audit_logs is not True:
+ if _store_audit_logs is not True:
return
+ _changed_by = (
+ litellm_changed_by
+ or user_api_key_dict.user_id
+ or litellm_proxy_admin_name
+ )
+
await create_audit_log_for_update(
request_data=LiteLLM_AuditLogs(
id=str(uuid.uuid4()),
updated_at=datetime.now(timezone.utc),
- changed_by=litellm_changed_by
- or user_api_key_dict.user_id
- or litellm_proxy_admin_name,
+ changed_by=_changed_by,
changed_by_api_key=user_api_key_dict.api_key,
table_name=table_name,
object_id=object_id,
@@ -70,10 +164,10 @@ async def create_audit_log_for_update(request_data: LiteLLM_AuditLogs):
"""
from litellm.secret_managers.main import get_secret_bool
- store_audit_logs = litellm.store_audit_logs or get_secret_bool(
+ _store_audit_logs: Optional[bool] = litellm.store_audit_logs or get_secret_bool(
"LITELLM_STORE_AUDIT_LOGS"
)
- if store_audit_logs is not True:
+ if _store_audit_logs is not True:
return
from litellm.proxy.proxy_server import premium_user, prisma_client
@@ -81,9 +175,6 @@ async def create_audit_log_for_update(request_data: LiteLLM_AuditLogs):
if premium_user is not True:
return
- if prisma_client is None:
- raise Exception("prisma_client is None, no DB connected")
-
verbose_proxy_logger.debug("creating audit log for %s", request_data)
if isinstance(request_data.updated_values, dict):
@@ -92,6 +183,15 @@ async def create_audit_log_for_update(request_data: LiteLLM_AuditLogs):
if isinstance(request_data.before_value, dict):
request_data.before_value = json.dumps(request_data.before_value)
+ # Dispatch to external audit log callbacks regardless of DB availability
+ await _dispatch_audit_log_to_callbacks(request_data)
+
+ if prisma_client is None:
+ verbose_proxy_logger.error(
+ "prisma_client is None, cannot write audit log to DB"
+ )
+ return
+
_request_data = request_data.model_dump(exclude_none=True)
try:
@@ -103,5 +203,3 @@ async def create_audit_log_for_update(request_data: LiteLLM_AuditLogs):
except Exception as e:
# [Non-Blocking Exception. Do not allow blocking LLM API call]
verbose_proxy_logger.error(f"Failed Creating audit log {e}")
-
- return
diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index e982c934aa6..61fb12a855d 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -2961,6 +2961,29 @@ async def load_config( # noqa: PLR0915
print( # noqa
f"{blue_color_code} Initialized Failure Callbacks - {litellm.failure_callback} {reset_color_code}"
) # noqa
+ elif key == "audit_log_callbacks":
+ litellm.audit_log_callbacks = []
+
+ for callback in value:
+ if "." in callback:
+ litellm.audit_log_callbacks.append(
+ get_instance_fn(value=callback)
+ )
+ else:
+ litellm.audit_log_callbacks.append(callback)
+
+ _store_audit_logs = litellm_settings.get(
+ "store_audit_logs", litellm.store_audit_logs
+ )
+ if _store_audit_logs:
+ print( # noqa
+ f"{blue_color_code} Initialized Audit Log Callbacks - {litellm.audit_log_callbacks} {reset_color_code}"
+ ) # noqa
+ else:
+ verbose_proxy_logger.warning(
+ "'audit_log_callbacks' is configured but 'store_audit_logs' is not enabled. "
+ "Audit log callbacks will not fire until 'store_audit_logs: true' is added to litellm_settings."
+ )
elif key == "cache_params":
# this is set in the cache branch
# see usage here: https://docs.litellm.ai/docs/proxy/caching
diff --git a/litellm/types/utils.py b/litellm/types/utils.py
index 38425c7ac4a..e7f0cd77143 100644
--- a/litellm/types/utils.py
+++ b/litellm/types/utils.py
@@ -2804,6 +2804,20 @@ class StandardLoggingPayloadStatusFields(TypedDict, total=False):
"""
+class StandardAuditLogPayload(TypedDict):
+ """Payload for audit log events dispatched to external callbacks."""
+
+ id: str
+ updated_at: str # ISO-8601
+ changed_by: str
+ changed_by_api_key: str
+ action: str # "created" | "updated" | "deleted" | "blocked" | "rotated"
+ table_name: str
+ object_id: str
+ before_value: Optional[str]
+ updated_values: Optional[str]
+
+
class StandardLoggingPayload(TypedDict):
id: str
trace_id: str # Trace multiple LLM calls belonging to same overall request (e.g. fallbacks/retries)
diff --git a/tests/test_litellm/proxy/management_helpers/test_audit_log_callbacks.py b/tests/test_litellm/proxy/management_helpers/test_audit_log_callbacks.py
new file mode 100644
index 00000000000..85eda4368cd
--- /dev/null
+++ b/tests/test_litellm/proxy/management_helpers/test_audit_log_callbacks.py
@@ -0,0 +1,345 @@
+"""
+Tests for audit log callback dispatch.
+
+Tests the flow: create_audit_log_for_update -> _dispatch_audit_log_to_callbacks -> CustomLogger.async_log_audit_log_event
+"""
+
+import asyncio
+import json
+from datetime import datetime, timezone
+from unittest.mock import AsyncMock, MagicMock, patch
+
+import pytest
+
+import litellm
+from litellm.integrations.custom_logger import CustomLogger
+from litellm.proxy._types import LiteLLM_AuditLogs, LitellmTableNames
+from litellm.proxy.management_helpers.audit_logs import (
+ _audit_log_task_done_callback,
+ _build_audit_log_payload,
+ _dispatch_audit_log_to_callbacks,
+ create_audit_log_for_update,
+)
+from litellm.types.utils import StandardAuditLogPayload
+
+
+@pytest.fixture(autouse=True)
+def reset_audit_log_callbacks():
+ """Reset audit_log_callbacks before and after each test."""
+ original = litellm.audit_log_callbacks
+ litellm.audit_log_callbacks = []
+ yield
+ litellm.audit_log_callbacks = original
+
+
+def _make_audit_log(
+ action: str = "created",
+ table_name: LitellmTableNames = LitellmTableNames.TEAM_TABLE_NAME,
+) -> LiteLLM_AuditLogs:
+ return LiteLLM_AuditLogs(
+ id="test-audit-id",
+ updated_at=datetime(2026, 3, 9, 12, 0, 0, tzinfo=timezone.utc),
+ changed_by="user-123",
+ changed_by_api_key="sk-abc",
+ action=action,
+ table_name=table_name,
+ object_id="team-456",
+ updated_values=json.dumps({"name": "new-team"}),
+ before_value=json.dumps({"name": "old-team"}),
+ )
+
+
+class TestBuildAuditLogPayload:
+ def test_builds_correct_payload(self):
+ audit_log = _make_audit_log()
+ payload = _build_audit_log_payload(audit_log)
+
+ assert payload["id"] == "test-audit-id"
+ assert payload["updated_at"] == "2026-03-09T12:00:00+00:00"
+ assert payload["changed_by"] == "user-123"
+ assert payload["changed_by_api_key"] == "sk-abc"
+ assert payload["action"] == "created"
+ assert payload["table_name"] == "LiteLLM_TeamTable"
+ assert payload["object_id"] == "team-456"
+ assert payload["updated_values"] == json.dumps({"name": "new-team"})
+ assert payload["before_value"] == json.dumps({"name": "old-team"})
+
+ def test_handles_none_values(self):
+ audit_log = LiteLLM_AuditLogs(
+ id="test-id",
+ updated_at=datetime(2026, 1, 1, tzinfo=timezone.utc),
+ changed_by=None,
+ changed_by_api_key=None,
+ action="deleted",
+ table_name=LitellmTableNames.KEY_TABLE_NAME,
+ object_id="key-789",
+ updated_values=None,
+ before_value=None,
+ )
+ payload = _build_audit_log_payload(audit_log)
+
+ assert payload["changed_by"] == ""
+ assert payload["changed_by_api_key"] == ""
+ assert payload["before_value"] is None
+ assert payload["updated_values"] is None
+
+
+class TestDispatchAuditLogToCallbacks:
+ @pytest.mark.asyncio
+ async def test_dispatches_to_custom_logger_instance(self):
+ mock_logger = MagicMock(spec=CustomLogger)
+ mock_logger.async_log_audit_log_event = AsyncMock()
+ litellm.audit_log_callbacks = [mock_logger]
+
+ audit_log = _make_audit_log()
+ await _dispatch_audit_log_to_callbacks(audit_log)
+
+ # Let asyncio.create_task run
+ await asyncio.sleep(0.1)
+
+ mock_logger.async_log_audit_log_event.assert_called_once()
+ payload = mock_logger.async_log_audit_log_event.call_args[0][0]
+ assert payload["id"] == "test-audit-id"
+ assert payload["action"] == "created"
+
+ @pytest.mark.asyncio
+ async def test_no_dispatch_when_callbacks_empty(self):
+ litellm.audit_log_callbacks = []
+ audit_log = _make_audit_log()
+ # Should return immediately without error
+ await _dispatch_audit_log_to_callbacks(audit_log)
+
+ @pytest.mark.asyncio
+ async def test_resolves_string_callback(self):
+ mock_logger = MagicMock(spec=CustomLogger)
+ mock_logger.async_log_audit_log_event = AsyncMock()
+
+ litellm.audit_log_callbacks = ["s3_v2"]
+
+ with patch(
+ "litellm.proxy.management_helpers.audit_logs._resolve_audit_log_callback",
+ return_value=mock_logger,
+ ):
+ audit_log = _make_audit_log()
+ await _dispatch_audit_log_to_callbacks(audit_log)
+ await asyncio.sleep(0.1)
+
+ mock_logger.async_log_audit_log_event.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_nonblocking_on_callback_failure(self):
+ """Callback errors should not propagate."""
+ mock_logger = MagicMock(spec=CustomLogger)
+ mock_logger.async_log_audit_log_event = AsyncMock(
+ side_effect=RuntimeError("boom")
+ )
+ litellm.audit_log_callbacks = [mock_logger]
+
+ audit_log = _make_audit_log()
+ # Should not raise
+ await _dispatch_audit_log_to_callbacks(audit_log)
+ await asyncio.sleep(0.1)
+
+ @pytest.mark.asyncio
+ async def test_skips_unresolvable_string_callback(self):
+ litellm.audit_log_callbacks = ["nonexistent_callback"]
+
+ with patch(
+ "litellm.proxy.management_helpers.audit_logs._resolve_audit_log_callback",
+ return_value=None,
+ ):
+ audit_log = _make_audit_log()
+ # Should not raise
+ await _dispatch_audit_log_to_callbacks(audit_log)
+
+
+class TestCreateAuditLogForUpdateWithCallbacks:
+ @pytest.mark.asyncio
+ async def test_dispatches_to_callbacks_after_db_write(self):
+ mock_logger = MagicMock(spec=CustomLogger)
+ mock_logger.async_log_audit_log_event = AsyncMock()
+ litellm.audit_log_callbacks = [mock_logger]
+
+ with patch("litellm.proxy.proxy_server.premium_user", True), patch(
+ "litellm.store_audit_logs", True
+ ), patch("litellm.proxy.proxy_server.prisma_client") as mock_prisma:
+ mock_prisma.db.litellm_auditlog.create = AsyncMock()
+
+ audit_log = _make_audit_log()
+ await create_audit_log_for_update(audit_log)
+ await asyncio.sleep(0.1)
+
+ # DB write should happen
+ mock_prisma.db.litellm_auditlog.create.assert_called_once()
+ # Callback should also be called
+ mock_logger.async_log_audit_log_event.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_no_dispatch_when_not_premium(self):
+ mock_logger = MagicMock(spec=CustomLogger)
+ mock_logger.async_log_audit_log_event = AsyncMock()
+ litellm.audit_log_callbacks = [mock_logger]
+
+ with patch("litellm.proxy.proxy_server.premium_user", False), patch(
+ "litellm.store_audit_logs", True
+ ):
+ audit_log = _make_audit_log()
+ await create_audit_log_for_update(audit_log)
+ await asyncio.sleep(0.1)
+
+ mock_logger.async_log_audit_log_event.assert_not_called()
+
+ @pytest.mark.asyncio
+ async def test_no_dispatch_when_store_audit_logs_false(self):
+ mock_logger = MagicMock(spec=CustomLogger)
+ mock_logger.async_log_audit_log_event = AsyncMock()
+ litellm.audit_log_callbacks = [mock_logger]
+
+ with patch("litellm.store_audit_logs", False):
+ audit_log = _make_audit_log()
+ await create_audit_log_for_update(audit_log)
+ await asyncio.sleep(0.1)
+
+ mock_logger.async_log_audit_log_event.assert_not_called()
+
+ @pytest.mark.asyncio
+ async def test_dispatches_even_when_prisma_client_is_none(self):
+ """Callbacks should fire even if DB is unavailable."""
+ mock_logger = MagicMock(spec=CustomLogger)
+ mock_logger.async_log_audit_log_event = AsyncMock()
+ litellm.audit_log_callbacks = [mock_logger]
+
+ with patch("litellm.proxy.proxy_server.premium_user", True), patch(
+ "litellm.store_audit_logs", True
+ ), patch("litellm.proxy.proxy_server.prisma_client", None):
+ audit_log = _make_audit_log()
+ await create_audit_log_for_update(audit_log)
+ await asyncio.sleep(0.1)
+
+ # Callback should still be called despite no DB
+ mock_logger.async_log_audit_log_event.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_dispatches_even_when_db_write_fails(self):
+ """Callbacks should fire even if the DB write raises."""
+ mock_logger = MagicMock(spec=CustomLogger)
+ mock_logger.async_log_audit_log_event = AsyncMock()
+ litellm.audit_log_callbacks = [mock_logger]
+
+ with patch("litellm.proxy.proxy_server.premium_user", True), patch(
+ "litellm.store_audit_logs", True
+ ), patch("litellm.proxy.proxy_server.prisma_client") as mock_prisma:
+ mock_prisma.db.litellm_auditlog.create = AsyncMock(
+ side_effect=RuntimeError("DB connection lost")
+ )
+
+ audit_log = _make_audit_log()
+ await create_audit_log_for_update(audit_log)
+ await asyncio.sleep(0.1)
+
+ # Callback should still be called despite DB failure
+ mock_logger.async_log_audit_log_event.assert_called_once()
+
+
+class TestAuditLogTaskDoneCallback:
+ def test_logs_exception_from_failed_task(self):
+ """Done callback should log task exceptions."""
+ mock_task = MagicMock(spec=asyncio.Task)
+ mock_task.exception.return_value = RuntimeError("callback failed")
+
+ with patch(
+ "litellm.proxy.management_helpers.audit_logs.verbose_proxy_logger"
+ ) as mock_logger:
+ _audit_log_task_done_callback(mock_task)
+ mock_logger.error.assert_called_once()
+ assert "callback failed" in str(mock_logger.error.call_args)
+
+ def test_no_log_on_success(self):
+ """Done callback should not log when task succeeds."""
+ mock_task = MagicMock(spec=asyncio.Task)
+ mock_task.exception.return_value = None
+
+ with patch(
+ "litellm.proxy.management_helpers.audit_logs.verbose_proxy_logger"
+ ) as mock_logger:
+ _audit_log_task_done_callback(mock_task)
+ mock_logger.error.assert_not_called()
+
+ def test_handles_cancelled_task(self):
+ """Done callback should handle cancelled tasks gracefully."""
+ mock_task = MagicMock(spec=asyncio.Task)
+ mock_task.exception.side_effect = asyncio.CancelledError()
+
+ with patch(
+ "litellm.proxy.management_helpers.audit_logs.verbose_proxy_logger"
+ ) as mock_logger:
+ _audit_log_task_done_callback(mock_task)
+ mock_logger.error.assert_not_called()
+
+
+class TestS3LoggerAuditLogEvent:
+ @pytest.mark.asyncio
+ async def test_queues_audit_log_with_correct_s3_key(self):
+ with patch(
+ "litellm.integrations.s3_v2.S3Logger.__init__", return_value=None
+ ):
+ from litellm.integrations.s3_v2 import S3Logger
+
+ logger = S3Logger()
+ logger.s3_path = "my-prefix"
+ logger.log_queue = []
+ logger.batch_size = 100
+
+ audit_log = StandardAuditLogPayload(
+ id="audit-123",
+ updated_at="2026-03-09T12:00:00+00:00",
+ changed_by="user-1",
+ changed_by_api_key="sk-abc",
+ action="created",
+ table_name="LiteLLM_TeamTable",
+ object_id="team-1",
+ before_value=None,
+ updated_values='{"name": "new"}',
+ )
+
+ await logger.async_log_audit_log_event(audit_log)
+
+ assert len(logger.log_queue) == 1
+ element = logger.log_queue[0]
+ assert element.s3_object_key.startswith("my-prefix/audit_logs/")
+ assert "audit-123" in element.s3_object_key
+ assert element.s3_object_key.endswith(".json")
+ assert element.s3_object_download_filename == "audit-audit-123.json"
+ assert element.payload["id"] == "audit-123"
+ assert element.payload["action"] == "created"
+
+ @pytest.mark.asyncio
+ async def test_s3_key_format_no_path(self):
+ with patch(
+ "litellm.integrations.s3_v2.S3Logger.__init__", return_value=None
+ ):
+ from litellm.integrations.s3_v2 import S3Logger
+
+ logger = S3Logger()
+ logger.s3_path = None
+ logger.log_queue = []
+ logger.batch_size = 100
+
+ audit_log = StandardAuditLogPayload(
+ id="audit-456",
+ updated_at="2026-03-09T12:00:00+00:00",
+ changed_by="user-1",
+ changed_by_api_key="sk-abc",
+ action="deleted",
+ table_name="LiteLLM_VerificationToken",
+ object_id="key-1",
+ before_value=None,
+ updated_values=None,
+ )
+
+ await logger.async_log_audit_log_event(audit_log)
+
+ assert len(logger.log_queue) == 1
+ element = logger.log_queue[0]
+ assert element.s3_object_key.startswith("audit_logs/")
+ assert "audit-456" in element.s3_object_key
diff --git a/tests/test_litellm/test_secret_redaction.py b/tests/test_litellm/test_secret_redaction.py
index 821529c111b..1d575195194 100644
--- a/tests/test_litellm/test_secret_redaction.py
+++ b/tests/test_litellm/test_secret_redaction.py
@@ -213,3 +213,52 @@ def test_json_excepthook_redacts_traceback_secrets():
output = h.formatter.format(record)
assert SECRET not in output
assert "REDACTED" in output
+
+
+def test_key_name_redaction_catches_secrets_in_dict_repr():
+ """Secrets inside dict repr strings are redacted based on key names."""
+ cases = [
+ # Python dict repr (the exact leak format from the bug report)
+ "param_name=general_settings, param_value={'master_key': 'my-random-secret-key-1234', 'enable_jwt_auth': True}",
+ # database_url
+ "'database_url': 'postgres://admin:password@db.example.com:5432/litellm'",
+ # JSON format
+ '"database_url": "postgres://admin:password@db.example.com:5432/litellm"',
+ # access_token
+ "'access_token': 'some-opaque-token-value'",
+ # refresh_token
+ "refresh_token=my-refresh-tok-12345",
+ # auth_token
+ "'auth_token': 'random-auth-value'",
+ # slack_webhook_url
+ "'slack_webhook_url': 'https://hooks.slack.com/services/T00/B00/xxx'",
+ ]
+ for secret_line in cases:
+ result = _redact_string(secret_line)
+ assert "REDACTED" in result, f"Key-name redaction missed: {secret_line!r}"
+
+ # Non-sensitive keys should NOT be redacted
+ safe = "'enable_jwt_auth': True, 'store_model_in_db': True"
+ assert _redact_string(safe) == safe
+
+
+def test_key_name_redaction_in_general_settings_dict():
+ """End-to-end: secrets inside a general_settings dict dump are redacted
+ when logged through the named litellm loggers."""
+
+ def log_messages():
+ general_settings = {
+ "master_key": "my-random-secret-key-1234",
+ "database_url": "postgres://admin:password@db.example.com:5432/litellm",
+ "enable_jwt_auth": True,
+ "store_model_in_db": True,
+ }
+ verbose_proxy_logger.debug(
+ f"param_name=general_settings, param_value={general_settings}"
+ )
+
+ output = _capture_logger_output(log_messages)
+ assert "my-random-secret-key-1234" not in output
+ assert "REDACTED" in output
+ # Non-sensitive values should survive
+ assert "enable_jwt_auth" in output
diff --git a/ui/litellm-dashboard/src/app/login/LoginPage.tsx b/ui/litellm-dashboard/src/app/login/LoginPage.tsx
index 5a9d420456c..e130dddc4a4 100644
--- a/ui/litellm-dashboard/src/app/login/LoginPage.tsx
+++ b/ui/litellm-dashboard/src/app/login/LoginPage.tsx
@@ -222,7 +222,7 @@ function LoginPageContent() {
{error && }
-
{
Form.useForm = () => [formMock];
- const Select = ({ children, onChange, ...props }: { children?: any; onChange?: (value: string) => void }) =>
+ const Select = ({ children, onChange, options, ...props }: { children?: any; onChange?: (value: string) => void; options?: Array<{ value: string; label: string }> }) =>
React.createElement(
"select",
{
@@ -127,6 +127,7 @@ vi.mock("antd", () => {
onChange: (event: any) => onChange?.(event.target.value),
},
children,
+ options?.map((opt: any) => React.createElement("option", { key: opt.value, value: opt.value }, opt.label)),
);
Select.Option = ({ children, ...props }: { children?: any }) =>
@@ -238,6 +239,16 @@ vi.mock("../key_team_helpers/fetch_available_models_team_key", () => ({
getModelDisplayName: (model: string) => model,
}));
+vi.mock("@/app/(dashboard)/hooks/tags/useTags", () => ({
+ useTags: vi.fn().mockReturnValue({
+ data: [
+ { name: "production", description: "Prod tag", models: [], created_at: "2026-01-01", updated_at: "2026-01-01" },
+ { name: "staging", description: "Staging tag", models: [], created_at: "2026-01-01", updated_at: "2026-01-01" },
+ ],
+ isLoading: false,
+ }),
+}));
+
vi.mock("@/app/(dashboard)/hooks/projects/useProjects", () => ({
useProjects: vi.fn().mockReturnValue({ data: [], isLoading: false }),
}));
@@ -525,4 +536,19 @@ describe("CreateKey", () => {
expect(formStateRef.current["organization_id"]).toBe("org-1");
});
});
+
+ describe("tags dropdown", () => {
+ it("should populate tags dropdown with options from useTags hook", async () => {
+ renderWithProviders( );
+
+ act(() => {
+ fireEvent.click(screen.getByRole("button", { name: /create new key/i }));
+ });
+
+ await waitFor(() => {
+ expect(screen.getByText("production")).toBeInTheDocument();
+ expect(screen.getByText("staging")).toBeInTheDocument();
+ });
+ });
+ });
});
diff --git a/ui/litellm-dashboard/src/components/organisms/create_key_button.tsx b/ui/litellm-dashboard/src/components/organisms/create_key_button.tsx
index d0a7a909d6e..0c0e4ccd12d 100644
--- a/ui/litellm-dashboard/src/components/organisms/create_key_button.tsx
+++ b/ui/litellm-dashboard/src/components/organisms/create_key_button.tsx
@@ -2,6 +2,7 @@
import { keyKeys } from "@/app/(dashboard)/hooks/keys/useKeys";
import { useOrganizations } from "@/app/(dashboard)/hooks/organizations/useOrganizations";
import { useProjects } from "@/app/(dashboard)/hooks/projects/useProjects";
+import { useTags } from "@/app/(dashboard)/hooks/tags/useTags";
import { useUISettings } from "@/app/(dashboard)/hooks/uiSettings/useUISettings";
import useAuthorized from "@/app/(dashboard)/hooks/useAuthorized";
import { formatNumberWithCommas } from "@/utils/dataUtils";
@@ -165,8 +166,12 @@ const CreateKey: React.FC = ({ team, teams, data, addKey, autoOp
const { data: organizations, isLoading: isOrganizationsLoading } = useOrganizations();
const { data: projects, isLoading: isProjectsLoading } = useProjects();
const { data: uiSettingsData } = useUISettings();
+ const { data: tagsData } = useTags();
const enableProjectsUI = Boolean(uiSettingsData?.values?.enable_projects_ui);
const disableCustomApiKeys = Boolean(uiSettingsData?.values?.disable_custom_api_keys);
+ const tagOptions = tagsData
+ ? Object.values(tagsData).map((tag) => ({ value: tag.name, label: tag.name }))
+ : [];
const queryClient = useQueryClient();
const [form] = Form.useForm();
const [isModalVisible, setIsModalVisible] = useState(false);
@@ -175,7 +180,6 @@ const CreateKey: React.FC = ({ team, teams, data, addKey, autoOp
const [userModels, setUserModels] = useState([]);
const [modelsToPick, setModelsToPick] = useState([]);
const [keyOwner, setKeyOwner] = useState("you");
- const [predefinedTags, setPredefinedTags] = useState(getPredefinedTags(data));
const [hasPrefilled, setHasPrefilled] = useState(false);
const [pendingPrefillModels, setPendingPrefillModels] = useState(null);
const [guardrailsList, setGuardrailsList] = useState([]);
@@ -1340,9 +1344,9 @@ const CreateKey: React.FC = ({ team, teams, data, addKey, autoOp
diff --git a/ui/litellm-dashboard/src/components/view_users/columns.tsx b/ui/litellm-dashboard/src/components/view_users/columns.tsx
index 73750d7a48a..4b9fad8f23c 100644
--- a/ui/litellm-dashboard/src/components/view_users/columns.tsx
+++ b/ui/litellm-dashboard/src/components/view_users/columns.tsx
@@ -3,7 +3,8 @@ import { Badge, Grid, Icon } from "@tremor/react";
import { Tooltip, Checkbox } from "antd";
import { UserInfo } from "./types";
import { PencilAltIcon, TrashIcon, InformationCircleIcon, RefreshIcon } from "@heroicons/react/outline";
-import { formatNumberWithCommas } from "@/utils/dataUtils";
+import { CopyOutlined } from "@ant-design/icons";
+import { formatNumberWithCommas, copyToClipboard } from "@/utils/dataUtils";
interface SelectionOptions {
selectedUsers: UserInfo[];
@@ -29,9 +30,22 @@ export const columns = (
accessorKey: "user_id",
enableSorting: true,
cell: ({ row }) => (
-
- {row.original.user_id ? `${row.original.user_id.slice(0, 7)}...` : "-"}
-
+
+
+ {row.original.user_id ? `${row.original.user_id.slice(0, 7)}...` : "-"}
+
+ {row.original.user_id && (
+
+ {
+ e.stopPropagation();
+ copyToClipboard(row.original.user_id, "User ID copied to clipboard");
+ }}
+ className="cursor-pointer text-gray-500 hover:text-blue-500 text-xs"
+ />
+
+ )}
+
),
},
{