Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions packages/opencode/src/provider/provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -961,6 +961,21 @@ export namespace Provider {
(v) => omit(v, ["disabled"]),
)
}

// Auto-detect 1M context window for Claude models when the user
// configures the context-1m beta flag. Without this, models.dev reports
// 200k and compaction triggers too early.
// Bedrock uses providerOptions: anthropicBeta (array of strings).
// Anthropic direct uses HTTP headers: anthropic-beta (comma-separated string).
if (model.limit.context <= 200_000) {
const has1mBeta =
(Array.isArray(model.options?.anthropicBeta) &&
model.options.anthropicBeta.some((b: string) => /^context-1m-/.test(b))) ||
/context-1m-/.test(model.headers?.["anthropic-beta"] ?? "")
if (has1mBeta) {
model.limit = { ...model.limit, context: 1_000_000 }
}
}
}

if (Object.keys(provider.models).length === 0) {
Expand Down
15 changes: 12 additions & 3 deletions packages/opencode/src/provider/transform.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,14 @@ function mimeToModality(mime: string): Modality | undefined {
export namespace ProviderTransform {
export const OUTPUT_TOKEN_MAX = Flag.OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX || 32_000

// Detects Claude-family models regardless of hosting provider
// (Anthropic direct, Amazon Bedrock, Google Vertex). Claude rejects
// empty content blocks no matter which provider serves it, so callers
// use this to apply Claude-specific message filtering uniformly.
function isClaude(model: Provider.Model): boolean {
  const { npm, id } = model.api
  if (npm === "@ai-sdk/anthropic") return true
  return id.includes("claude") || id.includes("anthropic")
}

// Maps npm package to the key the AI SDK expects for providerOptions
function sdkKey(npm: string): string | undefined {
switch (npm) {
Expand Down Expand Up @@ -49,9 +57,10 @@ export namespace ProviderTransform {
model: Provider.Model,
options: Record<string, unknown>,
): ModelMessage[] {
// Anthropic rejects messages with empty content - filter out empty string messages
// and remove empty text/reasoning parts from array content
if (model.api.npm === "@ai-sdk/anthropic") {
// Claude rejects messages with empty content - filter out empty string messages
// and remove empty text/reasoning parts from array content.
// This applies to all providers hosting Claude models (Anthropic, Bedrock, Vertex).
if (isClaude(model)) {
msgs = msgs
.map((msg) => {
if (typeof msg.content === "string") {
Expand Down
2 changes: 1 addition & 1 deletion packages/opencode/src/session/compaction.ts
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,7 @@ When constructing the summary, try to stick to this template:
tools: {},
system: [],
messages: [
...MessageV2.toModelMessages(input.messages, model),
...MessageV2.toModelMessages(input.messages, model, { stripMetadata: true }),
{
role: "user",
content: [
Expand Down
19 changes: 12 additions & 7 deletions packages/opencode/src/session/message-v2.ts
Original file line number Diff line number Diff line change
Expand Up @@ -442,7 +442,11 @@ export namespace MessageV2 {
})
export type WithParts = z.infer<typeof WithParts>

export function toModelMessages(input: WithParts[], model: Provider.Model): ModelMessage[] {
export function toModelMessages(
input: WithParts[],
model: Provider.Model,
options?: { stripMetadata?: boolean },
): ModelMessage[] {
const result: UIMessage[] = []
const toolNames = new Set<string>()
// Track media from tool results that need to be injected as user messages
Expand Down Expand Up @@ -540,7 +544,8 @@ export namespace MessageV2 {
}

if (msg.info.role === "assistant") {
const differentModel = `${model.providerID}/${model.id}` !== `${msg.info.providerID}/${msg.info.modelID}`
const skipMetadata =
options?.stripMetadata || `${model.providerID}/${model.id}` !== `${msg.info.providerID}/${msg.info.modelID}`
const media: Array<{ mime: string; url: string }> = []

if (
Expand All @@ -562,7 +567,7 @@ export namespace MessageV2 {
assistantMessage.parts.push({
type: "text",
text: part.text,
...(differentModel ? {} : { providerMetadata: part.metadata }),
...(skipMetadata ? {} : { providerMetadata: part.metadata }),
})
if (part.type === "step-start")
assistantMessage.parts.push({
Expand Down Expand Up @@ -599,7 +604,7 @@ export namespace MessageV2 {
toolCallId: part.callID,
input: part.state.input,
output,
...(differentModel ? {} : { callProviderMetadata: part.metadata }),
...(skipMetadata ? {} : { callProviderMetadata: part.metadata }),
})
}
if (part.state.status === "error")
Expand All @@ -609,7 +614,7 @@ export namespace MessageV2 {
toolCallId: part.callID,
input: part.state.input,
errorText: part.state.error,
...(differentModel ? {} : { callProviderMetadata: part.metadata }),
...(skipMetadata ? {} : { callProviderMetadata: part.metadata }),
})
// Handle pending/running tool calls to prevent dangling tool_use blocks
// Anthropic/Claude APIs require every tool_use to have a corresponding tool_result
Expand All @@ -620,14 +625,14 @@ export namespace MessageV2 {
toolCallId: part.callID,
input: part.state.input,
errorText: "[Tool execution was interrupted]",
...(differentModel ? {} : { callProviderMetadata: part.metadata }),
...(skipMetadata ? {} : { callProviderMetadata: part.metadata }),
})
}
if (part.type === "reasoning") {
assistantMessage.parts.push({
type: "reasoning",
text: part.text,
...(differentModel ? {} : { providerMetadata: part.metadata }),
...(skipMetadata ? {} : { providerMetadata: part.metadata }),
})
}
}
Expand Down
108 changes: 108 additions & 0 deletions packages/opencode/test/provider/provider.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2127,3 +2127,111 @@ test("custom model with variants enabled and disabled", async () => {
},
})
})

test("bedrock model with anthropicBeta context-1m option gets 1M context limit", async () => {
  // Project config enabling the 1M-context beta flag on a Bedrock Claude model.
  const config = {
    $schema: "https://opencode.ai/config.json",
    provider: {
      "amazon-bedrock": {
        models: {
          "us.anthropic.claude-opus-4-6-v1": {
            options: {
              anthropicBeta: ["context-1m-2025-08-07"],
            },
          },
        },
      },
    },
  }
  await using tmp = await tmpdir({
    init: (dir) => Bun.write(path.join(dir, "opencode.json"), JSON.stringify(config)),
  })
  await Instance.provide({
    directory: tmp.path,
    init: async () => {
      // Fake AWS credentials so the bedrock provider counts as configured.
      Env.set("AWS_ACCESS_KEY_ID", "test")
      Env.set("AWS_SECRET_ACCESS_KEY", "test")
      Env.set("AWS_REGION", "us-east-1")
    },
    fn: async () => {
      const providers = await Provider.list()
      const model = providers["amazon-bedrock"]?.models["us.anthropic.claude-opus-4-6-v1"]
      // skip if models.dev doesn't include this model in test env
      if (!model) return
      expect(model.limit.context).toBe(1_000_000)
    },
  })
})

test("bedrock model without anthropicBeta keeps default context limit", async () => {
  // Same Bedrock model but with no beta flag configured - the context limit
  // must stay at the models.dev default (at most 200k).
  const config = {
    $schema: "https://opencode.ai/config.json",
    provider: {
      "amazon-bedrock": {
        models: {
          "us.anthropic.claude-opus-4-6-v1": {},
        },
      },
    },
  }
  await using tmp = await tmpdir({
    init: (dir) => Bun.write(path.join(dir, "opencode.json"), JSON.stringify(config)),
  })
  await Instance.provide({
    directory: tmp.path,
    init: async () => {
      // Fake AWS credentials so the bedrock provider counts as configured.
      Env.set("AWS_ACCESS_KEY_ID", "test")
      Env.set("AWS_SECRET_ACCESS_KEY", "test")
      Env.set("AWS_REGION", "us-east-1")
    },
    fn: async () => {
      const providers = await Provider.list()
      const model = providers["amazon-bedrock"]?.models["us.anthropic.claude-opus-4-6-v1"]
      // skip if models.dev doesn't include this model in test env
      if (!model) return
      expect(model.limit.context).toBeLessThanOrEqual(200_000)
    },
  })
})

test("anthropic model with context-1m header gets 1M context limit", async () => {
  // Anthropic direct signals the beta via the comma-separated `anthropic-beta`
  // HTTP header rather than a providerOptions array.
  const config = {
    $schema: "https://opencode.ai/config.json",
    provider: {
      anthropic: {
        models: {
          "claude-opus-4-6": {
            headers: {
              "anthropic-beta": "context-1m-2025-08-07",
            },
          },
        },
      },
    },
  }
  await using tmp = await tmpdir({
    init: (dir) => Bun.write(path.join(dir, "opencode.json"), JSON.stringify(config)),
  })
  await Instance.provide({
    directory: tmp.path,
    init: async () => {
      Env.set("ANTHROPIC_API_KEY", "test-key")
    },
    fn: async () => {
      const providers = await Provider.list()
      const model = providers["anthropic"]?.models["claude-opus-4-6"]
      // skip if models.dev doesn't include this model in test env
      if (!model) return
      expect(model.limit.context).toBe(1_000_000)
    },
  })
})
64 changes: 62 additions & 2 deletions packages/opencode/test/provider/transform.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -649,7 +649,7 @@ describe("ProviderTransform.message - empty image handling", () => {
})
})

describe("ProviderTransform.message - anthropic empty content filtering", () => {
describe("ProviderTransform.message - claude empty content filtering", () => {
const anthropicModel = {
id: "anthropic/claude-3-5-sonnet",
providerID: "anthropic",
Expand Down Expand Up @@ -797,7 +797,7 @@ describe("ProviderTransform.message - anthropic empty content filtering", () =>
expect(result[0].content[1]).toEqual({ type: "text", text: "Result" })
})

test("does not filter for non-anthropic providers", () => {
test("does not filter for non-claude providers", () => {
const openaiModel = {
...anthropicModel,
providerID: "openai",
Expand All @@ -822,6 +822,66 @@ describe("ProviderTransform.message - anthropic empty content filtering", () =>
expect(result[0].content).toBe("")
expect(result[1].content).toHaveLength(1)
})

test("filters empty content for bedrock claude models", () => {
  // Claude hosted on Bedrock must get the same empty-content filtering as
  // Anthropic direct - detection falls back to the model id, not the npm package.
  const bedrockModel = {
    ...anthropicModel,
    providerID: "amazon-bedrock",
    api: {
      id: "us.anthropic.claude-opus-4-6-v1",
      url: "https://bedrock-runtime.us-east-1.amazonaws.com",
      npm: "@ai-sdk/amazon-bedrock",
    },
  }

  const input = [
    { role: "assistant", content: "" },
    {
      role: "assistant",
      content: [
        { type: "text", text: "" },
        { type: "text", text: "Hello" },
      ],
    },
    {
      role: "assistant",
      content: [
        { type: "reasoning", text: "" },
        { type: "text", text: "Answer" },
      ],
    },
  ] as any[]

  const result = ProviderTransform.message(input, bedrockModel, {})

  // Empty-string message dropped entirely; empty parts stripped from arrays.
  expect(result).toHaveLength(2)
  expect(result[0].content).toHaveLength(1)
  expect(result[0].content[0]).toEqual({ type: "text", text: "Hello" })
  expect(result[1].content).toHaveLength(1)
  expect(result[1].content[0]).toEqual({ type: "text", text: "Answer" })
})

test("filters empty content for vertex claude models", () => {
  // Vertex-hosted Claude (versioned `@date` model id) must also be detected
  // as Claude via the "claude" substring in the id.
  const vertexModel = {
    ...anthropicModel,
    providerID: "google-vertex",
    api: {
      id: "claude-opus-4-6@20260205",
      url: "https://us-east5-aiplatform.googleapis.com",
      npm: "@ai-sdk/google-vertex",
    },
  }

  const input = [
    { role: "assistant", content: "" },
    { role: "user", content: "World" },
  ] as any[]

  const result = ProviderTransform.message(input, vertexModel, {})

  // The empty assistant message is removed; the user message survives intact.
  expect(result).toHaveLength(1)
  expect(result[0].content).toBe("World")
})
})

describe("ProviderTransform.message - strip openai metadata when store=false", () => {
Expand Down
Loading
Loading