2 changes: 1 addition & 1 deletion packages/types/npm/package.metadata.json
@@ -1,6 +1,6 @@
{
"name": "@roo-code/types",
- "version": "1.60.0",
+ "version": "1.61.0",
"description": "TypeScript type definitions for Roo Code.",
"publishConfig": {
"access": "public",
1 change: 1 addition & 0 deletions packages/types/src/global-settings.ts
@@ -198,6 +198,7 @@ export const SECRET_STATE_KEYS = [
"fireworksApiKey",
"featherlessApiKey",
"ioIntelligenceApiKey",
"vercelAiGatewayApiKey",
] as const satisfies readonly (keyof ProviderSettings)[]
export type SecretState = Pick<ProviderSettings, (typeof SECRET_STATE_KEYS)[number]>
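Note (not part of the diff): because `SecretState` is a `Pick` over `ProviderSettings` keyed by `SECRET_STATE_KEYS`, the new entry makes the gateway key a valid secret field. A minimal sketch, assuming the secret fields on `ProviderSettings` remain optional strings:

```ts
// Sketch only: the added key is now a legal SecretState field.
const secrets: SecretState = {
	vercelAiGatewayApiKey: "vag_example_key", // illustrative value, not a real key
}
```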

11 changes: 11 additions & 0 deletions packages/types/src/provider-settings.ts
@@ -66,6 +66,7 @@ export const providerNames = [
"featherless",
"io-intelligence",
"roo",
"vercel-ai-gateway",
] as const

export const providerNamesSchema = z.enum(providerNames)
@@ -321,6 +322,11 @@ const rooSchema = apiModelIdProviderModelSchema.extend({
// No additional fields needed - uses cloud authentication
})

const vercelAiGatewaySchema = baseProviderSettingsSchema.extend({
vercelAiGatewayApiKey: z.string().optional(),
vercelAiGatewayModelId: z.string().optional(),
})

const defaultSchema = z.object({
apiProvider: z.undefined(),
})
@@ -360,6 +366,7 @@ export const providerSettingsSchemaDiscriminated = z.discriminatedUnion("apiProv
ioIntelligenceSchema.merge(z.object({ apiProvider: z.literal("io-intelligence") })),
qwenCodeSchema.merge(z.object({ apiProvider: z.literal("qwen-code") })),
rooSchema.merge(z.object({ apiProvider: z.literal("roo") })),
vercelAiGatewaySchema.merge(z.object({ apiProvider: z.literal("vercel-ai-gateway") })),
defaultSchema,
])
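A rough usage sketch of the new union member (assumes the inherited base provider fields are all optional, so a minimal object validates):

```ts
// Sketch only: a minimal Vercel AI Gateway config parsed through the discriminated union.
const parsed = providerSettingsSchemaDiscriminated.parse({
	apiProvider: "vercel-ai-gateway",
	vercelAiGatewayApiKey: "vag_example_key", // illustrative value
	vercelAiGatewayModelId: "anthropic/claude-sonnet-4",
})
```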

@@ -399,6 +406,7 @@ export const providerSettingsSchema = z.object({
...ioIntelligenceSchema.shape,
...qwenCodeSchema.shape,
...rooSchema.shape,
...vercelAiGatewaySchema.shape,
...codebaseIndexProviderSchema.shape,
})

@@ -425,6 +433,7 @@ export const MODEL_ID_KEYS: Partial<keyof ProviderSettings>[] = [
"litellmModelId",
"huggingFaceModelId",
"ioIntelligenceModelId",
"vercelAiGatewayModelId",
]

export const getModelId = (settings: ProviderSettings): string | undefined => {
@@ -541,6 +550,7 @@ export const MODELS_BY_PROVIDER: Record<
openrouter: { id: "openrouter", label: "OpenRouter", models: [] },
requesty: { id: "requesty", label: "Requesty", models: [] },
unbound: { id: "unbound", label: "Unbound", models: [] },
"vercel-ai-gateway": { id: "vercel-ai-gateway", label: "Vercel AI Gateway", models: [] },
}

export const dynamicProviders = [
@@ -550,6 +560,7 @@ export const dynamicProviders = [
"openrouter",
"requesty",
"unbound",
"vercel-ai-gateway",
] as const satisfies readonly ProviderName[]

export type DynamicProvider = (typeof dynamicProviders)[number]
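A small sketch of how the new id flows through these exports (assumes `MODELS_BY_PROVIDER` is keyed by provider name, as the other entries suggest):

```ts
// Sketch only: "vercel-ai-gateway" is now a DynamicProvider with a label entry.
const provider: DynamicProvider = "vercel-ai-gateway"
const { label } = MODELS_BY_PROVIDER[provider] // "Vercel AI Gateway"
const isDynamic = dynamicProviders.includes(provider) // true
```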
1 change: 1 addition & 0 deletions packages/types/src/providers/index.ts
@@ -27,4 +27,5 @@ export * from "./unbound.js"
export * from "./vertex.js"
export * from "./vscode-llm.js"
export * from "./xai.js"
export * from "./vercel-ai-gateway.js"
export * from "./zai.js"
102 changes: 102 additions & 0 deletions packages/types/src/providers/vercel-ai-gateway.ts
@@ -0,0 +1,102 @@
import type { ModelInfo } from "../model.js"

// https://ai-gateway.vercel.sh/v1/
export const vercelAiGatewayDefaultModelId = "anthropic/claude-sonnet-4"

export const VERCEL_AI_GATEWAY_PROMPT_CACHING_MODELS = new Set([
"anthropic/claude-3-haiku",
"anthropic/claude-3-opus",
"anthropic/claude-3.5-haiku",
"anthropic/claude-3.5-sonnet",
"anthropic/claude-3.7-sonnet",
"anthropic/claude-opus-4",
"anthropic/claude-opus-4.1",
"anthropic/claude-sonnet-4",
"openai/gpt-4.1",
"openai/gpt-4.1-mini",
"openai/gpt-4.1-nano",
"openai/gpt-4o",
"openai/gpt-4o-mini",
"openai/gpt-5",
"openai/gpt-5-mini",
"openai/gpt-5-nano",
"openai/o1",
"openai/o3",
"openai/o3-mini",
"openai/o4-mini",
])

export const VERCEL_AI_GATEWAY_VISION_ONLY_MODELS = new Set([
"alibaba/qwen-3-14b",
"alibaba/qwen-3-235b",
"alibaba/qwen-3-30b",
"alibaba/qwen-3-32b",
"alibaba/qwen3-coder",
"amazon/nova-pro",
"anthropic/claude-3.5-haiku",
"google/gemini-1.5-flash-8b",
"google/gemini-2.0-flash-thinking",
"google/gemma-3-27b",
"mistral/devstral-small",
"xai/grok-vision-beta",
])

export const VERCEL_AI_GATEWAY_VISION_AND_TOOLS_MODELS = new Set([
"amazon/nova-lite",
"anthropic/claude-3-haiku",
"anthropic/claude-3-opus",
"anthropic/claude-3-sonnet",
"anthropic/claude-3.5-sonnet",
"anthropic/claude-3.7-sonnet",
"anthropic/claude-opus-4",
"anthropic/claude-opus-4.1",
"anthropic/claude-sonnet-4",
"google/gemini-1.5-flash",
"google/gemini-1.5-pro",
"google/gemini-2.0-flash",
"google/gemini-2.0-flash-lite",
"google/gemini-2.0-pro",
"google/gemini-2.5-flash",
"google/gemini-2.5-flash-lite",
"google/gemini-2.5-pro",
"google/gemini-exp",
"meta/llama-3.2-11b",
"meta/llama-3.2-90b",
"meta/llama-3.3",
"meta/llama-4-maverick",
"meta/llama-4-scout",
"mistral/pixtral-12b",
"mistral/pixtral-large",
"moonshotai/kimi-k2",
"openai/gpt-4-turbo",
"openai/gpt-4.1",
"openai/gpt-4.1-mini",
"openai/gpt-4.1-nano",
"openai/gpt-4.5-preview",
"openai/gpt-4o",
"openai/gpt-4o-mini",
"openai/gpt-oss-120b",
"openai/gpt-oss-20b",
"openai/o3",
"openai/o3-pro",
"openai/o4-mini",
"vercel/v0-1.0-md",
"xai/grok-2-vision",
"zai/glm-4.5v",
])

export const vercelAiGatewayDefaultModelInfo: ModelInfo = {
maxTokens: 64000,
contextWindow: 200000,
supportsImages: true,
supportsComputerUse: true,
supportsPromptCache: true,
inputPrice: 3,
outputPrice: 15,
cacheWritesPrice: 3.75,
cacheReadsPrice: 0.3,
description:
"Claude Sonnet 4 significantly improves on Sonnet 3.7's industry-leading capabilities, excelling in coding with a state-of-the-art 72.7% on SWE-bench. The model balances performance and efficiency for internal and external use cases, with enhanced steerability for greater control over implementations. While not matching Opus 4 in most domains, it delivers an optimal mix of capability and practicality.",
}

export const VERCEL_AI_GATEWAY_DEFAULT_TEMPERATURE = 0.7
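To show how these sets can be consumed (a sketch, not code from this PR), capability flags for a model id can be derived by membership checks:

```ts
// Sketch only: deriving capability flags for a model id from the exported sets.
const modelId = vercelAiGatewayDefaultModelId // "anthropic/claude-sonnet-4"

const supportsPromptCache = VERCEL_AI_GATEWAY_PROMPT_CACHING_MODELS.has(modelId)
const supportsImages =
	VERCEL_AI_GATEWAY_VISION_ONLY_MODELS.has(modelId) ||
	VERCEL_AI_GATEWAY_VISION_AND_TOOLS_MODELS.has(modelId)
```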
18 changes: 9 additions & 9 deletions pnpm-lock.yaml

Some generated files are not rendered by default.

3 changes: 3 additions & 0 deletions src/api/index.ts
@@ -38,6 +38,7 @@ import {
FireworksHandler,
RooHandler,
FeatherlessHandler,
VercelAiGatewayHandler,
} from "./providers"
import { NativeOllamaHandler } from "./providers/native-ollama"

@@ -151,6 +152,8 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler {
return new RooHandler(options)
case "featherless":
return new FeatherlessHandler(options)
case "vercel-ai-gateway":
return new VercelAiGatewayHandler(options)
default:
apiProvider satisfies "gemini-cli" | undefined
return new AnthropicHandler(options)
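Usage sketch for the new branch (the env var name is illustrative, not from this PR):

```ts
// Sketch only: buildApiHandler now routes "vercel-ai-gateway" to VercelAiGatewayHandler.
const handler = buildApiHandler({
	apiProvider: "vercel-ai-gateway",
	vercelAiGatewayApiKey: process.env.VERCEL_AI_GATEWAY_API_KEY, // illustrative env var
	vercelAiGatewayModelId: vercelAiGatewayDefaultModelId,
})
```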