diff --git a/.changeset/spread-anthropic-provider-options.md b/.changeset/spread-anthropic-provider-options.md new file mode 100644 index 000000000000..7100e2b1c945 --- /dev/null +++ b/.changeset/spread-anthropic-provider-options.md @@ -0,0 +1,5 @@ +--- +'@ai-sdk/anthropic': patch +--- + +Spread message-level `providerOptions.anthropic` (excluding cache control keys) onto assistant messages, enabling custom fields like `reasoning_content` to pass through to the HTTP body. diff --git a/packages/anthropic/src/convert-to-anthropic-messages-prompt.test.ts b/packages/anthropic/src/convert-to-anthropic-messages-prompt.test.ts index baf3ca326cda..5763f5b84cd6 100644 --- a/packages/anthropic/src/convert-to-anthropic-messages-prompt.test.ts +++ b/packages/anthropic/src/convert-to-anthropic-messages-prompt.test.ts @@ -1961,6 +1961,57 @@ describe('assistant messages', () => { `); }); }); + + it('should spread message-level providerOptions.anthropic onto assistant message', async () => { + const result = await convertToAnthropicMessagesPrompt({ + prompt: [ + { + role: 'assistant', + content: [{ type: 'text', text: 'Hello' }], + providerOptions: { + anthropic: { + reasoning_content: 'I thought about it...', + }, + }, + }, + ], + sendReasoning: true, + warnings: [], + toolNameMapping: defaultToolNameMapping, + }); + + const msg = result.prompt.messages[0] as any; + expect(msg.reasoning_content).toBe('I thought about it...'); + expect(msg.content).toEqual([ + { type: 'text', text: 'Hello', cache_control: undefined }, + ]); + }); + + it('should exclude cacheControl and cache_control from spread', async () => { + const result = await convertToAnthropicMessagesPrompt({ + prompt: [ + { + role: 'assistant', + content: [{ type: 'text', text: 'Hello' }], + providerOptions: { + anthropic: { + cacheControl: { type: 'ephemeral' }, + cache_control: { type: 'ephemeral' }, + reasoning_content: 'thinking...', + }, + }, + }, + ], + sendReasoning: true, + warnings: [], + toolNameMapping: 
defaultToolNameMapping, + }); + + const msg = result.prompt.messages[0] as any; + expect(msg.reasoning_content).toBe('thinking...'); + expect(msg.cacheControl).toBeUndefined(); + expect(msg.cache_control).toBeUndefined(); + }); }); describe('cache control', () => { diff --git a/packages/anthropic/src/convert-to-anthropic-messages-prompt.ts b/packages/anthropic/src/convert-to-anthropic-messages-prompt.ts index ea847a52123f..52a256261f69 100644 --- a/packages/anthropic/src/convert-to-anthropic-messages-prompt.ts +++ b/packages/anthropic/src/convert-to-anthropic-messages-prompt.ts @@ -992,7 +992,25 @@ export async function convertToAnthropicMessagesPrompt({ } } - messages.push({ role: 'assistant', content: anthropicContent }); + // Collect message-level providerOptions.anthropic (excluding cache + // control keys) and spread onto the message, mirroring how the + // openai-compatible provider handles providerOptions.openaiCompatible. + const extra: Record<string, unknown> = {}; + for (const message of block.messages) { + const opts = message.providerOptions?.anthropic; + if (opts != null && typeof opts === 'object') { + for (const [k, v] of Object.entries(opts)) { + if (k === 'cacheControl' || k === 'cache_control') continue; + extra[k] = v; + } + } + } + + messages.push({ + role: 'assistant', + content: anthropicContent, + ...extra, + } as AnthropicAssistantMessage); break; }