Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions core/providers/anthropic/anthropic.go
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,7 @@ func (provider *AnthropicProvider) completeRequest(ctx *schemas.BifrostContext,
}
req.Header.Set("anthropic-version", provider.apiVersion)

if betaHeaders := FilterBetaHeadersForProvider(MergeBetaHeaders(provider.networkConfig.ExtraHeaders, ctx), schemas.Anthropic, provider.networkConfig.BetaHeaderOverrides); len(betaHeaders) > 0 {
if betaHeaders := FilterBetaHeadersForProvider(MergeBetaHeaders(ctx, provider.networkConfig.ExtraHeaders), schemas.Anthropic, provider.networkConfig.BetaHeaderOverrides); len(betaHeaders) > 0 {
req.Header.Set(AnthropicBetaHeader, strings.Join(betaHeaders, ","))
} else {
req.Header.Del(AnthropicBetaHeader)
Expand Down Expand Up @@ -615,7 +615,7 @@ func HandleAnthropicChatCompletionStreaming(

providerUtils.SetExtraHeaders(ctx, req, extraHeaders, []string{AnthropicBetaHeader})

if betaHeaders := FilterBetaHeadersForProvider(MergeBetaHeaders(extraHeaders, ctx), providerName, betaHeaderOverrides); len(betaHeaders) > 0 {
if betaHeaders := FilterBetaHeadersForProvider(MergeBetaHeaders(ctx, extraHeaders), providerName, betaHeaderOverrides); len(betaHeaders) > 0 {
req.Header.Set(AnthropicBetaHeader, strings.Join(betaHeaders, ","))
} else {
req.Header.Del(AnthropicBetaHeader)
Expand Down Expand Up @@ -1080,7 +1080,7 @@ func HandleAnthropicResponsesStream(

providerUtils.SetExtraHeaders(ctx, req, extraHeaders, []string{AnthropicBetaHeader})

if betaHeaders := FilterBetaHeadersForProvider(MergeBetaHeaders(extraHeaders, ctx), providerName, betaHeaderOverrides); len(betaHeaders) > 0 {
if betaHeaders := FilterBetaHeadersForProvider(MergeBetaHeaders(ctx, extraHeaders), providerName, betaHeaderOverrides); len(betaHeaders) > 0 {
req.Header.Set(AnthropicBetaHeader, strings.Join(betaHeaders, ","))
} else {
req.Header.Del(AnthropicBetaHeader)
Expand Down
2 changes: 1 addition & 1 deletion core/providers/anthropic/request_builder.go
Original file line number Diff line number Diff line change
Expand Up @@ -310,7 +310,7 @@ func BuildAnthropicResponsesRequestBody(ctx *schemas.BifrostContext, request *sc
}

if cfg.InjectBetaHeadersIntoBody {
if betaHeaders := FilterBetaHeadersForProvider(MergeBetaHeaders(cfg.ProviderExtraHeaders, ctx), cfg.Provider, cfg.BetaHeaderOverrides); len(betaHeaders) > 0 {
if betaHeaders := FilterBetaHeadersForProvider(MergeBetaHeaders(ctx, cfg.ProviderExtraHeaders), cfg.Provider, cfg.BetaHeaderOverrides); len(betaHeaders) > 0 {
jsonBody, err = providerUtils.SetJSONField(jsonBody, "anthropic_beta", betaHeaders)
if err != nil {
return nil, newErr(schemas.ErrProviderRequestMarshal, err, jsonBody)
Expand Down
27 changes: 20 additions & 7 deletions core/providers/anthropic/types.go
Original file line number Diff line number Diff line change
Expand Up @@ -80,8 +80,10 @@ const (
AnthropicContext1MBetaHeaderPrefix = "context-1m-"
AnthropicFastModeBetaHeaderPrefix = "fast-mode-"
AnthropicRedactThinkingBetaHeaderPrefix = "redact-thinking-"
AnthropicTaskBudgetsBetaHeaderPrefix = "task-budgets-"
AnthropicEagerInputStreamingBetaHeaderPrefix = "fine-grained-tool-streaming-"
AnthropicTaskBudgetsBetaHeaderPrefix = "task-budgets-"
AnthropicEagerInputStreamingBetaHeaderPrefix = "fine-grained-tool-streaming-"
AnthropicContextManagementBetaHeaderPrefix = "context-management-"
AnthropicCompactionBetaHeaderPrefix = "compact-"
)

// ProviderFeatureSupport defines which Anthropic features a given provider supports.
Expand Down Expand Up @@ -114,8 +116,9 @@ type ProviderFeatureSupport struct {
InputExamples bool // tool.input_examples standalone — tool-examples-2025-10-29. Bedrock supports this independently of the AdvancedToolUse bundle (cite: B-header). On Anthropic / Azure the bundle implicitly covers it.
StructuredOutputs bool // strict tool validation / output_format (cite: A)
PromptCachingScope bool // cache_control.scope — prompt-caching-scope-2026-01-05 (cite: A)
Compaction bool // compact_20260112 (cite: A, B-header)
ContextEditing bool // clear_tool_uses / clear_thinking (cite: A, B-header)
Compaction bool // compact_20260112 (cite: A, B-header)
ContextEditing bool // clear_tool_uses / clear_thinking (cite: A, B-header)
ContextManagementField bool // provider accepts the context_management JSON body field at all; false → entire field dropped regardless of edit types
Comment thread
coderabbitai[bot] marked this conversation as resolved.
FilesAPI bool // files-api-2025-04-14, file_id source (cite: A)
InterleavedThinking bool // interleaved thinking between tool calls (cite: A, B-header; fails on non-allowlisted models on Bedrock/Vertex)
Skills bool // Agent Skills — container.skills object (cite: A)
Expand All @@ -142,7 +145,7 @@ var ProviderFeatures = map[schemas.ModelProvider]ProviderFeatureSupport{
WebSearch: true, WebSearchDynamic: true, WebFetch: true, CodeExecution: true,
ComputerUse: true, Bash: true, Memory: true, TextEditor: true, ToolSearch: true,
MCP: true, AdvancedToolUse: true, InputExamples: true, StructuredOutputs: true, PromptCachingScope: true,
Compaction: true, ContextEditing: true, FilesAPI: true,
Compaction: true, ContextEditing: true, ContextManagementField: true, FilesAPI: true,
InterleavedThinking: true, Skills: true, ContainerBasic: true, Context1M: true,
FastMode: true, RedactThinking: true, TaskBudgets: true,
InferenceGeo: true, EagerInputStreaming: true, AdvisorTool: true,
Expand All @@ -151,13 +154,22 @@ var ProviderFeatures = map[schemas.ModelProvider]ProviderFeatureSupport{
// Notably NOT supported: MCP (MCP-excl), Skills/container.skills,
// InferenceGeo, FastMode, TaskBudgets, AdvisorTool, StructuredOutputs,
// PromptCachingScope (400 "unexpected beta header" per LiteLLM #19984),
// ContextEditing (400 "unexpected beta header" per live API error),
// ContextManagementField (400 "Extra inputs are not permitted" per live API error
// when the request body carries a context_management object).
// Compaction IS supported on Vertex via the compact-2026-01-12 beta header even
// though Anthropic's compaction docs don't list Vertex (verified by live
// testing). The header passes through FilterBetaHeadersForProvider because
// Compaction: true; the body-field stripper at utils.go:460 removes any
// client-side context_management payload (gated by ContextManagementField:
// false) so the request still succeeds. The two flags are intentionally
// independent: one controls header forwarding, the other controls body shape.
// FilesAPI, WebFetch, CodeExecution, AdvancedToolUse, RedactThinking.
schemas.Vertex: {
WebSearch: true, // web search GA on Vertex per A; earlier code restricted to web_search_20250305 — A doesn't qualify
ComputerUse: true, Bash: true, Memory: true, TextEditor: true, ToolSearch: true,
ContainerBasic: true,
Compaction: true,
ContextEditing: true,
InterleavedThinking: true, // V-platform confirms; fails on non-allowlisted 4-series
Context1M: true,
EagerInputStreaming: true, // fine-grained-tool-streaming GA per A
Expand All @@ -178,6 +190,7 @@ var ProviderFeatures = map[schemas.ModelProvider]ProviderFeatureSupport{
StructuredOutputs: true,
Compaction: true, // compact-2026-01-12 per B-header
ContextEditing: true, // context-management-2025-06-27 per B-header (bundles memory)
ContextManagementField: true, // Bedrock accepts context_management body field
InterleavedThinking: true, // per B-header; model-allowlisted
Context1M: true, // Opus 4.6 / Sonnet 4.6 per A
EagerInputStreaming: true, // fine-grained-tool-streaming-2025-05-14 per B-header
Expand All @@ -193,7 +206,7 @@ var ProviderFeatures = map[schemas.ModelProvider]ProviderFeatureSupport{
WebSearch: true, WebSearchDynamic: true, WebFetch: true, CodeExecution: true,
ComputerUse: true, Bash: true, Memory: true, TextEditor: true, ToolSearch: true,
MCP: true, AdvancedToolUse: true, InputExamples: true, StructuredOutputs: true, PromptCachingScope: true,
Compaction: true, ContextEditing: true, FilesAPI: true,
Compaction: true, ContextEditing: true, ContextManagementField: true, FilesAPI: true,
InterleavedThinking: true, Skills: true, ContainerBasic: true, Context1M: true,
RedactThinking: true, TaskBudgets: true,
EagerInputStreaming: true,
Expand Down
78 changes: 45 additions & 33 deletions core/providers/anthropic/utils.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import (
"context"
"encoding/json"
"fmt"
"slices"
"strings"

"github.com/bytedance/sonic"
Expand Down Expand Up @@ -453,36 +454,44 @@ func StripUnsupportedFieldsFromRawBody(jsonBody []byte, provider schemas.ModelPr
}
}

// context_management.edits[] — gate per edit.type.
if editsResult := providerUtils.GetJSONField(jsonBody, "context_management.edits"); editsResult.Exists() && editsResult.IsArray() {
edits := editsResult.Array()
// Collect indices to drop (iterate forwards, delete in reverse).
dropIndices := []int{}
for i, edit := range edits {
editType := edit.Get("type").String()
keep := true
switch editType {
case string(ContextManagementEditTypeCompact):
keep = features.Compaction
case string(ContextManagementEditTypeClearToolUses), string(ContextManagementEditTypeClearThinking):
keep = features.ContextEditing
}
if !keep {
dropIndices = append(dropIndices, i)
}
}
if len(dropIndices) == len(edits) && len(edits) > 0 {
// All edits unsupported — drop the whole context_management.
// context_management — if the provider doesn't accept the field at all (e.g. Vertex),
// drop it entirely. Otherwise gate per edit.type.
if providerUtils.JSONFieldExists(jsonBody, "context_management") {
if !features.ContextManagementField {
jsonBody, err = providerUtils.DeleteJSONField(jsonBody, "context_management")
if err != nil {
return nil, fmt.Errorf("strip raw context_management: %w", err)
}
} else {
for i := len(dropIndices) - 1; i >= 0; i-- {
path := fmt.Sprintf("context_management.edits.%d", dropIndices[i])
jsonBody, err = providerUtils.DeleteJSONField(jsonBody, path)
} else if editsResult := providerUtils.GetJSONField(jsonBody, "context_management.edits"); editsResult.Exists() && editsResult.IsArray() {
edits := editsResult.Array()
// Collect indices to drop (iterate forwards, delete in reverse).
dropIndices := []int{}
for i, edit := range edits {
editType := edit.Get("type").String()
keep := true
switch editType {
case string(ContextManagementEditTypeCompact):
keep = features.Compaction
case string(ContextManagementEditTypeClearToolUses), string(ContextManagementEditTypeClearThinking):
keep = features.ContextEditing
}
if !keep {
dropIndices = append(dropIndices, i)
}
}
if len(dropIndices) == len(edits) {
// No edits to keep (either empty input or all unsupported) — drop the whole context_management.
jsonBody, err = providerUtils.DeleteJSONField(jsonBody, "context_management")
if err != nil {
return nil, fmt.Errorf("strip raw context_management.edits[%d]: %w", dropIndices[i], err)
return nil, fmt.Errorf("strip raw context_management: %w", err)
}
} else {
for i := len(dropIndices) - 1; i >= 0; i-- {
path := fmt.Sprintf("context_management.edits.%d", dropIndices[i])
jsonBody, err = providerUtils.DeleteJSONField(jsonBody, path)
if err != nil {
return nil, fmt.Errorf("strip raw context_management.edits[%d]: %w", dropIndices[i], err)
}
}
}
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.
Expand Down Expand Up @@ -1138,11 +1147,11 @@ var betaHeaderPrefixToFeature = map[string]func(ProviderFeatureSupport) bool{

// MergeBetaHeaders collects anthropic-beta values from provider ExtraHeaders and
// per-request context headers, deduplicating them.
func MergeBetaHeaders(providerExtraHeaders map[string]string, ctx context.Context) []string {
func MergeBetaHeaders(ctx context.Context, providerExtraHeaders map[string]string) []string {
seen := make(map[string]bool)
var all []string
add := func(v string) {
for _, part := range strings.Split(v, ",") {
for part := range strings.SplitSeq(v, ",") {
if t := strings.TrimSpace(part); t != "" && !seen[t] {
seen[t] = true
all = append(all, t)
Expand Down Expand Up @@ -1185,8 +1194,7 @@ func FilterBetaHeadersForProvider(headers []string, provider schemas.ModelProvid

filtered := make([]string, 0, len(headers))
for _, h := range headers {
tokens := strings.Split(h, ",")
for _, token := range tokens {
for token := range strings.SplitSeq(h, ",") {
token = strings.TrimSpace(token)

if token == "" {
Expand Down Expand Up @@ -1252,10 +1260,8 @@ func FilterBetaHeadersForProvider(headers []string, provider schemas.ModelProvid

// appendUniqueHeader returns slice with item appended, unless slice already
// contains item, in which case slice is returned unchanged.
func appendUniqueHeader(slice []string, item string) []string {
	for _, existing := range slice {
		if existing == item {
			return slice
		}
	}
	return append(slice, item)
}
Expand Down Expand Up @@ -1952,6 +1958,12 @@ func filterEnumValuesByType(enumValues []interface{}, schemaType string) []inter
return filtered
}

// NormalizeSchemaForAnthropic is the exported entry point for normalizeSchemaForAnthropic,
// used by providers (e.g. Bedrock) that share Anthropic's schema validation rules.
// It delegates directly with no additional processing; the input map is passed through
// as-is, so any mutation semantics are those of normalizeSchemaForAnthropic itself
// (NOTE(review): whether the argument is mutated in place or copied is not visible
// here — confirm against normalizeSchemaForAnthropic before relying on either).
func NormalizeSchemaForAnthropic(schema map[string]interface{}) map[string]interface{} {
	return normalizeSchemaForAnthropic(schema)
}

// normalizeSchemaForAnthropic recursively normalizes a JSON schema to be compatible with Anthropic's API.
// This handles cases where:
// 1. type is an array like ["string", "null"] - converted to single type
Expand Down
Loading
Loading