Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions apps/web/env.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ export const env = createEnv({
EMAIL_ENCRYPT_SALT: z.string(),

DEFAULT_LLM_PROVIDER: z
// custom is deprecated
.enum([...llmProviderEnum.options, "custom"])
.default("anthropic"),
DEFAULT_LLM_MODEL: z.string().optional(),
Comment on lines 30 to 34
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue

Blocker: Env schema still allows "custom" while runtime support was removed. Fail fast instead of crashing later.

Provider.CUSTOM is gone (apps/web/utils/llms/config.ts), but the env schema still accepts "custom". If DEFAULT_LLM_PROVIDER=custom (or persisted configs set it), selectModel will hit the default case and throw at runtime. Remove "custom" from the enum so misconfigs are caught at startup.

Apply this diff:

-    DEFAULT_LLM_PROVIDER: z
-      // custom is deprecated
-      .enum([...llmProviderEnum.options, "custom"])
-      .default("anthropic"),
+    DEFAULT_LLM_PROVIDER: llmProviderEnum.default("anthropic"),

Optional: if you prefer a friendlier error message during migration, keep accepting the literal but reject it explicitly:

-    DEFAULT_LLM_PROVIDER: z
-      .enum([...llmProviderEnum.options, "custom"])
-      .default("anthropic"),
+    DEFAULT_LLM_PROVIDER: z
+      .enum(llmProviderEnum.options)
+      .or(z.literal("custom"))
+      .refine((v) => v !== "custom", {
+        message:
+          'DEFAULT_LLM_PROVIDER "custom" is deprecated. Use one of: ' +
+          llmProviderEnum.options.join(", "),
+      })
+      .default("anthropic"),
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
DEFAULT_LLM_PROVIDER: z
// custom is deprecated
.enum([...llmProviderEnum.options, "custom"])
.default("anthropic"),
DEFAULT_LLM_MODEL: z.string().optional(),
DEFAULT_LLM_PROVIDER: llmProviderEnum.default("anthropic"),
DEFAULT_LLM_MODEL: z.string().optional(),
🤖 Prompt for AI Agents
In apps/web/env.ts around lines 30 to 34, the zod env schema still includes the
deprecated "custom" value in DEFAULT_LLM_PROVIDER which causes a runtime crash
if used; remove "custom" from the enum options so invalid configs fail at
startup. Update the enum to exclude "custom" (or, if you want a
migration-friendly message, accept the literal but add an explicit refine/check
that throws a clear error instructing to change "custom" to a supported
provider). Ensure DEFAULT_LLM_PROVIDER still has the same default after the
change and run schema validation in startup to catch bad env values early.

Expand Down
1 change: 0 additions & 1 deletion apps/web/utils/llms/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@ export const Provider = {
GROQ: "groq",
OPENROUTER: "openrouter",
AI_GATEWAY: "aigateway",
CUSTOM: "custom",
...(supportsOllama ? { OLLAMA: "ollama" } : {}),
};

Expand Down
24 changes: 0 additions & 24 deletions apps/web/utils/llms/model.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -314,29 +314,5 @@ describe("Models", () => {
"Anthropic",
]);
});

it("should preserve custom logic and not override with default provider options", () => {
const userAi: UserAIFields = {
aiApiKey: null,
aiProvider: null,
aiModel: null,
};

vi.mocked(env).DEFAULT_LLM_PROVIDER = "custom";
vi.mocked(env).DEFAULT_OPENROUTER_PROVIDERS = "Should Not Override";
vi.mocked(env).OPENROUTER_API_KEY = "test-openrouter-key";

const result = getModel(userAi, "default");
expect(result.provider).toBe(Provider.OPENROUTER);
// Should have custom logic provider options, not the default ones
expect(result.providerOptions?.openrouter?.provider?.order).toEqual([
"Google Vertex",
"Google AI Studio",
]);
// Should NOT contain the DEFAULT_OPENROUTER_PROVIDERS value
expect(result.providerOptions?.openrouter?.provider?.order).not.toContain(
"Should Not Override",
);
});
});
});
85 changes: 14 additions & 71 deletions apps/web/utils/llms/model.ts
Original file line number Diff line number Diff line change
Expand Up @@ -286,8 +286,7 @@ function selectDefaultModel(userAi: UserAIFields): SelectModel {
let aiModel: string | null = null;
const aiApiKey = userAi.aiApiKey;

const providerOptions: Record<string, any> =
createOpenRouterProviderOptions("");
const providerOptions: Record<string, any> = {};

// If user has not api key set, then use default model
// If they do they can use the model of their choice
Expand All @@ -297,79 +296,23 @@ function selectDefaultModel(userAi: UserAIFields): SelectModel {
} else {
aiProvider = env.DEFAULT_LLM_PROVIDER;
aiModel = env.DEFAULT_LLM_MODEL || null;

// Allow custom logic in production with fallbacks that doesn't impact self-hosters
if (aiProvider === Provider.CUSTOM) {
// choose randomly between bedrock sonnet 3.7, sonnet 4, and openrouter
const models = [
// {
// provider: Provider.ANTHROPIC,
// modelName: Model.CLAUDE_3_7_SONNET_BEDROCK,
// },
// {
// provider: Provider.ANTHROPIC,
// modelName: Model.CLAUDE_4_SONNET_BEDROCK,
// },
{
provider: Provider.OPENROUTER,
modelName: null,
},
];

const selectedProviderAndModel =
models[Math.floor(Math.random() * models.length)];

aiProvider = selectedProviderAndModel.provider;
aiModel = selectedProviderAndModel.modelName;

if (aiProvider === Provider.OPENROUTER) {
function selectRandomModel() {
// to avoid rate limits, we'll select a random model
const models = [
"google/gemini-2.5-pro",
// "anthropic/claude-sonnet-4",
// "anthropic/claude-3.7-sonnet",
];
return models[Math.floor(Math.random() * models.length)];
}
aiModel = selectRandomModel() || null;
providerOptions.openrouter = {
models: [
"google/gemini-2.5-pro",
// "anthropic/claude-sonnet-4",
// "anthropic/claude-3.7-sonnet",
],
provider: {
// max 3 options
order: [
"Google Vertex",
"Google AI Studio",
// "Anthropic",
// "Amazon Bedrock",
],
},
};
} else {
return selectModel({
aiProvider: Provider.ANTHROPIC,
aiModel,
aiApiKey: null,
});
}
}
}

// Configure OpenRouter provider options if using OpenRouter for default model
// (but not overriding custom logic which already sets its own provider options)
if (
aiProvider === Provider.OPENROUTER &&
env.DEFAULT_OPENROUTER_PROVIDERS &&
!providerOptions.openrouter
) {
if (aiProvider === Provider.OPENROUTER) {
const openRouterOptions = createOpenRouterProviderOptions(
env.DEFAULT_OPENROUTER_PROVIDERS,
env.DEFAULT_OPENROUTER_PROVIDERS || "",
);
Object.assign(providerOptions, openRouterOptions);

// Preserve any custom options set earlier; always ensure reasoning exists.
const existingOpenRouterOptions = providerOptions.openrouter || {};
providerOptions.openrouter = {
...openRouterOptions.openrouter,
...existingOpenRouterOptions,
reasoning: {
...openRouterOptions.openrouter.reasoning,
...(existingOpenRouterOptions.reasoning ?? {}),
},
};
}

return selectModel(
Expand Down
2 changes: 1 addition & 1 deletion version.txt
Original file line number Diff line number Diff line change
@@ -1 +1 @@
v2.9.36
v2.9.37
Loading