Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions litellm/llms/anthropic/common_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,15 @@
from litellm.types.llms.openai import AllMessageValues


def is_anthropic_oauth_key(value: Optional[str]) -> bool:
    """Return True when *value* holds an Anthropic OAuth token (sk-ant-oat*).

    Accepts either the raw token or the ``"Bearer <token>"`` header form;
    ``None`` is treated as not-an-OAuth-token.
    """
    if value is None:
        return False
    # Strip an optional "Bearer " auth scheme before inspecting the token.
    token = value
    if token.startswith("Bearer "):
        token = token[len("Bearer "):]
    return token.startswith(ANTHROPIC_OAUTH_TOKEN_PREFIX)

def optionally_handle_anthropic_oauth(
Comment on lines +32 to 34
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Missing blank line between functions

PEP 8 requires two blank lines between top-level function definitions. There's only one blank line separating is_anthropic_oauth_key from optionally_handle_anthropic_oauth.

Suggested change
return value.startswith(ANTHROPIC_OAUTH_TOKEN_PREFIX)
def optionally_handle_anthropic_oauth(
return value.startswith(ANTHROPIC_OAUTH_TOKEN_PREFIX)
def optionally_handle_anthropic_oauth(

Note: If this suggestion doesn't match your team's coding style, reply to this and let me know. I'll remember it for next time!

headers: dict, api_key: Optional[str]
) -> tuple[dict, Optional[str]]:
Expand Down
19 changes: 18 additions & 1 deletion litellm/proxy/litellm_pre_call_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -237,15 +237,22 @@ def clean_headers(
"""
Removes litellm api key from headers
"""
from litellm.llms.anthropic.common_utils import is_anthropic_oauth_key
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Provider-specific import in proxy layer

This introduces Anthropic-specific logic (is_anthropic_oauth_key) into the proxy's clean_headers() function, which is a general-purpose proxy utility. The project's custom instructions recommend avoiding provider-specific code outside of the llms/ directory.

That said, I see that add_provider_specific_headers_to_request() already contains Anthropic-specific header handling (the ANTHROPIC_API_HEADERS loop), so this PR extends an existing pattern rather than introducing a new one. If the maintainers want to address this in the future, consider extracting a more generic "provider pass-through credential" abstraction that could work for any provider, rather than hardcoding Anthropic OAuth detection in the proxy layer.

Context Used: Rule from dashboard - What: Avoid writing provider-specific code outside of the llms/ directory.

Why: This practice ensur... (source)

Note: If this suggestion doesn't match your team's coding style, reply to this and let me know. I'll remember it for next time!


clean_headers = {}
litellm_key_lower = (
litellm_key_header_name.lower() if litellm_key_header_name is not None else None
)

for header, value in headers.items():
header_lower = header.lower()
# Preserve Authorization header if it contains Anthropic OAuth token (sk-ant-oat*)
# This allows OAuth tokens to be forwarded to Anthropic-compatible providers
# via add_provider_specific_headers_to_request()
if header_lower == "authorization" and is_anthropic_oauth_key(value):
clean_headers[header] = value
# Check if header should be excluded: either in special headers cache or matches custom litellm key
if header_lower not in _SPECIAL_HEADERS_CACHE and (
elif header_lower not in _SPECIAL_HEADERS_CACHE and (
litellm_key_lower is None or header_lower != litellm_key_lower
):
clean_headers[header] = value
Expand Down Expand Up @@ -1687,6 +1694,8 @@ def add_provider_specific_headers_to_request(
data: dict,
headers: dict,
):
from litellm.llms.anthropic.common_utils import is_anthropic_oauth_key

anthropic_headers = {}
# boolean to indicate if a header was added
added_header = False
Expand All @@ -1696,6 +1705,14 @@ def add_provider_specific_headers_to_request(
anthropic_headers[header] = header_value
added_header = True

# Check for Authorization header with Anthropic OAuth token (sk-ant-oat*)
# This needs to be handled via provider-specific headers to ensure it only
# goes to Anthropic-compatible providers, not all providers in the router
for header, value in headers.items():
if header.lower() == "authorization" and is_anthropic_oauth_key(value):
anthropic_headers[header] = value
added_header = True
break
if added_header is True:
# Anthropic headers work across multiple providers
# Store as comma-separated list so retrieval can match any of them
Expand Down
139 changes: 139 additions & 0 deletions tests/test_litellm/llms/anthropic/test_anthropic_common_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -283,3 +283,142 @@ def test_passthrough_regular_key_uses_x_api_key(self):

assert updated_headers["x-api-key"] == FAKE_REGULAR_KEY
assert "authorization" not in updated_headers


class TestIsAnthropicOAuthKey:
    """Tests for the is_anthropic_oauth_key helper."""

    def test_oauth_token_raw(self):
        """A bare OAuth token is recognized."""
        from litellm.llms.anthropic.common_utils import is_anthropic_oauth_key

        for token in ("sk-ant-oat01-abc123", "sk-ant-oat02-xyz789"):
            assert is_anthropic_oauth_key(token) is True

    def test_oauth_token_bearer_format(self):
        """An OAuth token behind a Bearer scheme is recognized."""
        from litellm.llms.anthropic.common_utils import is_anthropic_oauth_key

        for token in ("Bearer sk-ant-oat01-abc123", "Bearer sk-ant-oat02-xyz789"):
            assert is_anthropic_oauth_key(token) is True

    def test_non_oauth_tokens(self):
        """None, empty strings, and regular API keys are rejected."""
        from litellm.llms.anthropic.common_utils import is_anthropic_oauth_key

        non_oauth_values = (
            None,
            "",
            "sk-ant-api01-abc123",
            "Bearer sk-ant-api01-abc123",
        )
        for value in non_oauth_values:
            assert is_anthropic_oauth_key(value) is False

    def test_case_sensitivity(self):
        """Prefix matching is case-sensitive; upper-cased variants are rejected."""
        from litellm.llms.anthropic.common_utils import is_anthropic_oauth_key

        for value in ("sk-ant-OAT01-abc123", "SK-ANT-OAT01-abc123"):
            assert is_anthropic_oauth_key(value) is False

    def test_just_prefix(self):
        """The bare prefix with no trailing characters still matches."""
        from litellm.llms.anthropic.common_utils import is_anthropic_oauth_key

        bare_prefix = "sk-ant-oat"
        assert is_anthropic_oauth_key(bare_prefix) is True


class TestProxyOAuthHeaderForwarding:
    """Tests for proxy-layer OAuth header preservation and forwarding."""

    def test_clean_headers_preserves_oauth_authorization(self):
        """clean_headers keeps an Authorization header carrying an OAuth token."""
        from starlette.datastructures import Headers

        from litellm.proxy.litellm_pre_call_utils import clean_headers

        oauth_value = f"Bearer {FAKE_OAUTH_TOKEN}"
        incoming = Headers(
            raw=[
                (b"authorization", oauth_value.encode()),
                (b"content-type", b"application/json"),
            ]
        )
        result = clean_headers(incoming)

        assert result["content-type"] == "application/json"
        assert "authorization" in result
        assert result["authorization"] == oauth_value

    def test_clean_headers_strips_non_oauth_authorization(self):
        """clean_headers drops an Authorization header holding a regular API key."""
        from starlette.datastructures import Headers

        from litellm.proxy.litellm_pre_call_utils import clean_headers

        incoming = Headers(
            raw=[
                (b"authorization", b"Bearer sk-regular-key-123"),
                (b"content-type", b"application/json"),
            ]
        )
        result = clean_headers(incoming)

        assert result["content-type"] == "application/json"
        assert "authorization" not in result

    def test_add_provider_specific_headers_forwards_oauth(self):
        """An OAuth Authorization header becomes a ProviderSpecificHeader scoped
        to the Anthropic-compatible providers."""
        from litellm.proxy.litellm_pre_call_utils import (
            add_provider_specific_headers_to_request,
        )

        oauth_value = f"Bearer {FAKE_OAUTH_TOKEN}"
        request_data: dict = {}
        incoming = {
            "authorization": oauth_value,
            "content-type": "application/json",
        }

        add_provider_specific_headers_to_request(data=request_data, headers=incoming)

        assert "provider_specific_header" in request_data
        header_cfg = request_data["provider_specific_header"]
        # OAuth tokens must reach every Anthropic-compatible provider family.
        for provider in ("anthropic", "bedrock", "vertex_ai"):
            assert provider in header_cfg["custom_llm_provider"]
        assert header_cfg["extra_headers"]["authorization"] == oauth_value

    def test_add_provider_specific_headers_ignores_non_oauth(self):
        """A regular API key in Authorization must not produce a
        ProviderSpecificHeader entry."""
        from litellm.proxy.litellm_pre_call_utils import (
            add_provider_specific_headers_to_request,
        )

        request_data: dict = {}
        incoming = {
            "authorization": "Bearer sk-regular-key-123",
            "content-type": "application/json",
        }

        add_provider_specific_headers_to_request(data=request_data, headers=incoming)

        assert "provider_specific_header" not in request_data

    def test_add_provider_specific_headers_combines_anthropic_and_oauth(self):
        """Both anthropic-beta and an OAuth Authorization header end up in the
        same ProviderSpecificHeader when both are present."""
        from litellm.proxy.litellm_pre_call_utils import (
            add_provider_specific_headers_to_request,
        )

        oauth_value = f"Bearer {FAKE_OAUTH_TOKEN}"
        request_data: dict = {}
        incoming = {
            "authorization": oauth_value,
            "anthropic-beta": "oauth-2025-04-20",
            "content-type": "application/json",
        }

        add_provider_specific_headers_to_request(data=request_data, headers=incoming)

        assert "provider_specific_header" in request_data
        forwarded = request_data["provider_specific_header"]["extra_headers"]
        assert forwarded["authorization"] == oauth_value
        assert forwarded["anthropic-beta"] == "oauth-2025-04-20"
Loading