src/api/providers/fetchers/__tests__/openrouter.spec.ts (52 additions, 0 deletions)
@@ -30,6 +30,7 @@ describe("OpenRouter API", () => {
 			supportsReasoningEffort: false,
 			supportsNativeTools: true,
 			supportedParameters: ["max_tokens", "temperature", "reasoning", "include_reasoning"],
+			defaultToolProtocol: "native",
 		})
 
 		expect(models["anthropic/claude-3.7-sonnet:thinking"]).toEqual({
@@ -47,6 +48,7 @@ describe("OpenRouter API", () => {
 			supportsReasoningEffort: true,
 			supportsNativeTools: true,
 			supportedParameters: ["max_tokens", "temperature", "reasoning", "include_reasoning"],
+			defaultToolProtocol: "native",
 		})
 
 		expect(models["google/gemini-2.5-flash-preview-05-20"].maxTokens).toEqual(65535)
@@ -390,5 +392,55 @@ describe("OpenRouter API", () => {
 			expect(textResult.maxTokens).toBe(64000)
 			expect(imageResult.maxTokens).toBe(64000)
 		})
+
+		it("sets defaultToolProtocol to native when model supports native tools", () => {
+			const mockModel = {
+				name: "Tools Model",
+				description: "Model with native tool support",
+				context_length: 128000,
+				max_completion_tokens: 8192,
+				pricing: {
+					prompt: "0.000003",
+					completion: "0.000015",
+				},
+			}
+
+			const resultWithTools = parseOpenRouterModel({
+				id: "test/tools-model",
+				model: mockModel,
+				inputModality: ["text"],
+				outputModality: ["text"],
+				maxTokens: 8192,
+				supportedParameters: ["tools", "max_tokens", "temperature"],
+			})
+
+			expect(resultWithTools.supportsNativeTools).toBe(true)
+			expect(resultWithTools.defaultToolProtocol).toBe("native")
+		})
+
+		it("does not set defaultToolProtocol when model does not support native tools", () => {
+			const mockModel = {
+				name: "No Tools Model",
+				description: "Model without native tool support",
+				context_length: 128000,
+				max_completion_tokens: 8192,
+				pricing: {
+					prompt: "0.000003",
+					completion: "0.000015",
+				},
+			}
+
+			const resultWithoutTools = parseOpenRouterModel({
+				id: "test/no-tools-model",
+				model: mockModel,
+				inputModality: ["text"],
+				outputModality: ["text"],
+				maxTokens: 8192,
+				supportedParameters: ["max_tokens", "temperature"],
+			})
+
+			expect(resultWithoutTools.supportsNativeTools).toBe(false)
+			expect(resultWithoutTools.defaultToolProtocol).toBeUndefined()
+		})
 	})
 })
src/api/providers/fetchers/openrouter.ts (5 additions, 1 deletion)
@@ -207,6 +207,8 @@ export const parseOpenRouterModel = ({

 	const supportsPromptCache = typeof cacheReadsPrice !== "undefined" // some models support caching but don't charge a cacheWritesPrice, e.g. GPT-5
 
+	const supportsNativeTools = supportedParameters ? supportedParameters.includes("tools") : undefined
+
 	const modelInfo: ModelInfo = {
 		maxTokens: maxTokens || Math.ceil(model.context_length * 0.2),
 		contextWindow: model.context_length,
@@ -218,8 +220,10 @@
 		cacheReadsPrice,
 		description: model.description,
 		supportsReasoningEffort: supportedParameters ? supportedParameters.includes("reasoning") : undefined,
-		supportsNativeTools: supportedParameters ? supportedParameters.includes("tools") : undefined,
+		supportsNativeTools,
 		supportedParameters: supportedParameters ? supportedParameters.filter(isModelParameter) : undefined,
+		// Default to native tool protocol when native tools are supported
+		defaultToolProtocol: supportsNativeTools ? ("native" as const) : undefined,
 	}
 
 	if (OPEN_ROUTER_REASONING_BUDGET_MODELS.has(id)) {
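
A minimal sketch, not part of this PR, of how a downstream consumer might use the new defaultToolProtocol field. ToolProtocol, resolveToolProtocol, and the "xml" fallback are illustrative assumptions rather than names taken from this codebase; only supportsNativeTools and defaultToolProtocol come from the change above.

// Hypothetical consumer: pick the tool protocol for a parsed OpenRouter model.
// Only `supportsNativeTools` and `defaultToolProtocol` come from the PR; the
// rest is an assumed shape for illustration.
type ToolProtocol = "native" | "xml"

interface ParsedModelInfo {
	supportsNativeTools?: boolean
	defaultToolProtocol?: ToolProtocol
}

function resolveToolProtocol(info: ParsedModelInfo, userOverride?: ToolProtocol): ToolProtocol {
	// An explicit user setting wins over the model-derived default.
	if (userOverride) {
		return userOverride
	}
	// Otherwise use the default derived from supportedParameters ("tools" => "native"),
	// falling back to a prompt-based ("xml") protocol when no default was set.
	return info.defaultToolProtocol ?? "xml"
}

// Example: a model whose supportedParameters included "tools" resolves to "native".
console.log(resolveToolProtocol({ supportsNativeTools: true, defaultToolProtocol: "native" })) // "native"
// Example: a model without native tool support falls back to "xml".
console.log(resolveToolProtocol({ supportsNativeTools: false })) // "xml"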