5 changes: 5 additions & 0 deletions apps/web/.env.example
@@ -92,6 +92,11 @@ LOG_ZOD_ERRORS=true
 # ECONOMY_LLM_MODEL=llama-3.1-8b-instant
 # GROQ_API_KEY=
 
+# --- Ollama (Local LLM) ---
+# DEFAULT_LLM_PROVIDER=ollama
+# OLLAMA_MODEL=llama3
+# OLLAMA_BASE_URL=http://localhost:11434/api
+
 # =============================================================================
 # Everything below is optional
 # =============================================================================
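Together these variables are all that is needed to point the app at a local Ollama daemon: DEFAULT_LLM_PROVIDER selects Ollama when a user has not picked a provider, and OLLAMA_MODEL plus OLLAMA_BASE_URL configure the endpoint. A minimal sketch of the provider construction this enables, using the ollama-ai-provider-v2 API adopted later in this PR (the values are the commented-out examples above):

import { createOllama } from "ollama-ai-provider-v2";

// createOllama returns a provider function; calling it with a model name
// yields an AI SDK language model, exactly as model.ts does below.
const ollama = createOllama({ baseURL: "http://localhost:11434/api" });
const model = ollama("llama3");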
3 changes: 1 addition & 2 deletions apps/web/env.ts
@@ -59,6 +59,7 @@ export const env = createEnv({
     OPENROUTER_API_KEY: z.string().optional(),
     AI_GATEWAY_API_KEY: z.string().optional(),
     OLLAMA_BASE_URL: z.string().optional(),
+    OLLAMA_MODEL: z.string().optional(),
 
     OPENAI_ZERO_DATA_RETENTION: z.coerce.boolean().optional().default(false),
 
@@ -178,7 +179,6 @@ export const env = createEnv({
         if (!value) return;
         return value.split(",");
       }),
-    NEXT_PUBLIC_OLLAMA_MODEL: z.string().optional(),
     NEXT_PUBLIC_DUB_REFER_DOMAIN: z.string().optional(),
     NEXT_PUBLIC_DISABLE_REFERRAL_SIGNATURE: z.coerce
       .boolean()
@@ -236,7 +236,6 @@ export const env = createEnv({
     NEXT_PUBLIC_AXIOM_DATASET: process.env.NEXT_PUBLIC_AXIOM_DATASET,
     NEXT_PUBLIC_AXIOM_TOKEN: process.env.NEXT_PUBLIC_AXIOM_TOKEN,
     NEXT_PUBLIC_LOG_SCOPES: process.env.NEXT_PUBLIC_LOG_SCOPES,
-    NEXT_PUBLIC_OLLAMA_MODEL: process.env.NEXT_PUBLIC_OLLAMA_MODEL,
     NEXT_PUBLIC_DUB_REFER_DOMAIN: process.env.NEXT_PUBLIC_DUB_REFER_DOMAIN,
     NEXT_PUBLIC_DISABLE_REFERRAL_SIGNATURE:
       process.env.NEXT_PUBLIC_DISABLE_REFERRAL_SIGNATURE,
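The substantive change here is moving the model name from the client section (NEXT_PUBLIC_OLLAMA_MODEL) to the server section (OLLAMA_MODEL): NEXT_PUBLIC_ variables are inlined into the browser bundle at build time, whereas server variables stay on the server. A minimal sketch of the resulting shape, assuming createEnv comes from @t3-oss/env-nextjs as the call here suggests (abridged to the Ollama entries):

import { createEnv } from "@t3-oss/env-nextjs";
import { z } from "zod";

export const env = createEnv({
  server: {
    // Server-only: validated at startup, never shipped to the browser.
    OLLAMA_BASE_URL: z.string().optional(),
    OLLAMA_MODEL: z.string().optional(),
  },
  client: {},
  // Client vars need an explicit runtime mapping; server vars are read
  // from process.env directly.
  experimental__runtimeEnv: {},
});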
2 changes: 1 addition & 1 deletion apps/web/package.json
@@ -133,7 +133,7 @@
     "next-themes": "0.4.6",
     "nodemailer": "7.0.11",
     "nuqs": "2.8.2",
-    "ollama-ai-provider": "1.2.0",
+    "ollama-ai-provider-v2": "1.5.5",
     "openai": "6.9.1",
     "p-queue": "9.0.1",
     "p-retry": "7.1.0",
1 change: 0 additions & 1 deletion apps/web/utils/actions/settings.validation.ts
@@ -32,7 +32,6 @@ export const saveAiSettingsBody = z
       Provider.GOOGLE,
       Provider.GROQ,
       Provider.OPENROUTER,
-      ...(Provider.OLLAMA ? [Provider.OLLAMA] : []),
     ]),
     aiModel: z.string(),
     aiApiKey: z.string().optional(),
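With that entry removed, "ollama" is no longer accepted as a per-user provider in settings; Ollama is now enabled globally through the environment instead. A standalone sketch of the resulting validation behavior (the literal values are assumptions; the real schema builds its enum from Provider constants):

import { z } from "zod";

// Hypothetical equivalent of the post-change enum.
const aiProviderSchema = z.enum([
  "anthropic",
  "openai",
  "google",
  "groq",
  "openrouter",
]);

console.log(aiProviderSchema.safeParse("ollama").success); // false: rejected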
7 changes: 1 addition & 6 deletions apps/web/utils/llms/config.ts
@@ -1,7 +1,5 @@
 import { env } from "@/env";
 
-export const supportsOllama = !!env.NEXT_PUBLIC_OLLAMA_MODEL;
-
 export const DEFAULT_PROVIDER = "DEFAULT";
 
 export const Provider = {
@@ -12,7 +10,7 @@ export const Provider = {
   GROQ: "groq",
   OPENROUTER: "openrouter",
   AI_GATEWAY: "aigateway",
-  ...(supportsOllama ? { OLLAMA: "ollama" } : {}),
+  ...(env.OLLAMA_MODEL ? { OLLAMA: "ollama" } : {}),
 };
 
 export const providerOptions: { label: string; value: string }[] = [
@@ -23,7 +21,4 @@ export const providerOptions: { label: string; value: string }[] = [
   { label: "Groq", value: Provider.GROQ },
   { label: "OpenRouter", value: Provider.OPENROUTER },
   { label: "AI Gateway", value: Provider.AI_GATEWAY },
-  ...(supportsOllama && Provider.OLLAMA
-    ? [{ label: "Ollama", value: Provider.OLLAMA }]
-    : []),
 ];
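Note the consequence of the conditional spread: Provider.OLLAMA only exists as a key when env.OLLAMA_MODEL is set, and evaluates to undefined otherwise. A standalone repro of the pitfall this creates, which the review comment on model.ts below flags:

// Hypothetical repro, not the app's actual config.
const OLLAMA_MODEL: string | undefined = undefined; // env var unset

const Provider: Record<string, string | undefined> = {
  OPEN_AI: "openai",
  ...(OLLAMA_MODEL ? { OLLAMA: "ollama" } : {}),
};

function label(aiProvider: string): string {
  switch (aiProvider) {
    // With OLLAMA_MODEL unset, Provider.OLLAMA is undefined, so this case
    // compares against undefined and can never match the string "ollama".
    case Provider.OLLAMA:
      return "Ollama";
    default:
      return "unknown";
  }
}

console.log(label("ollama")); // prints "unknown": falls through to default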
33 changes: 19 additions & 14 deletions apps/web/utils/llms/model.test.ts
@@ -31,7 +31,7 @@ vi.mock("@openrouter/ai-sdk-provider", () => ({
   })),
 }));
 
-vi.mock("ollama-ai-provider", () => ({
+vi.mock("ollama-ai-provider-v2", () => ({
   createOllama: vi.fn(() => (model: string) => ({ model })),
 }));
 
@@ -50,8 +50,8 @@ vi.mock("@/env", () => ({
     ANTHROPIC_API_KEY: "test-anthropic-key",
     GROQ_API_KEY: "test-groq-key",
     OPENROUTER_API_KEY: "test-openrouter-key",
-    OLLAMA_BASE_URL: "http://localhost:11434",
-    NEXT_PUBLIC_OLLAMA_MODEL: "llama3",
+    OLLAMA_BASE_URL: "http://localhost:11434/api",
+    OLLAMA_MODEL: "llama3",
     BEDROCK_REGION: "us-west-2",
     BEDROCK_ACCESS_KEY: "",
     BEDROCK_SECRET_KEY: "",
@@ -153,18 +153,23 @@ describe("Models", () => {
     expect(result.model).toBeDefined();
   });
 
-  // it("should configure Ollama model correctly", () => {
-  //   const userAi: UserAIFields = {
-  //     aiApiKey: "user-api-key",
-  //     aiProvider: Provider.OLLAMA!,
-  //     aiModel: "llama3",
-  //   };
+  it("should configure Ollama model correctly via env vars", () => {
+    const userAi: UserAIFields = {
+      aiApiKey: null,
+      aiProvider: null,
+      aiModel: null,
+    };
 
-  //   const result = getModel(userAi);
-  //   expect(result.provider).toBe(Provider.OLLAMA);
-  //   expect(result.modelName).toBe("llama3");
-  //   expect(result.model).toBeDefined();
-  // });
+    vi.mocked(env).DEFAULT_LLM_PROVIDER = "ollama";
+    vi.mocked(env).OLLAMA_MODEL = "llama3";
+    vi.mocked(env).OLLAMA_BASE_URL = "http://localhost:11434/api";
+
+    const result = getModel(userAi);
+    expect(result.provider).toBe(Provider.OLLAMA);
+    expect(result.modelName).toBe("llama3");
+    expect(result.model).toBeDefined();
+    expect(result.backupModel).toBeNull(); // No backup for local Ollama
+  });
 
   it("should configure Anthropic model correctly without Bedrock credentials", () => {
     const userAi: UserAIFields = {
Expand Down
24 changes: 13 additions & 11 deletions apps/web/utils/llms/model.ts
@@ -6,7 +6,7 @@ import { createGoogleGenerativeAI } from "@ai-sdk/google";
 import { createGroq } from "@ai-sdk/groq";
 import { createOpenRouter } from "@openrouter/ai-sdk-provider";
 import { createGateway } from "@ai-sdk/gateway";
-// import { createOllama } from "ollama-ai-provider";
+import { createOllama } from "ollama-ai-provider-v2";
 import { env } from "@/env";
 import { Provider } from "@/utils/llms/config";
 import type { UserAIFields } from "@/utils/llms/types";
@@ -139,16 +139,17 @@ function selectModel(
       };
 
     case Provider.OLLAMA: {
-      throw new Error(
-        "Ollama is not supported. Revert to version v1.7.28 or older to use it.",
-      );
-      // const modelName = aiModel || env.NEXT_PUBLIC_OLLAMA_MODEL;
-      // if (!modelName) throw new Error("Ollama model is not set");
-      // return {
-      //   provider: Provider.OLLAMA!,
-      //   modelName,
-      //   model: createOllama({ baseURL: env.OLLAMA_BASE_URL })(model),
-      // };
+      const modelName = env.OLLAMA_MODEL;
+      const provider = Provider.OLLAMA;
+      if (!modelName)
+        throw new Error("OLLAMA_MODEL environment variable is not set");
+      if (!provider) throw new Error("Provider.OLLAMA is not defined");
+      return {
+        provider,
+        modelName,
+        model: createOllama({ baseURL: env.OLLAMA_BASE_URL })(modelName),
+        backupModel: null,
+      };
     }
 
     case Provider.BEDROCK: {

Review comment from a Contributor, on the case Provider.OLLAMA line:

case Provider.OLLAMA is unreachable when env.OLLAMA_MODEL is unset (since Provider.OLLAMA is then undefined). This makes aiProvider === 'ollama' hit the default case with a misleading error. Consider matching the literal 'ollama' (or validating earlier) so the intended OLLAMA_MODEL error is thrown.

Suggested change:
-    case Provider.OLLAMA: {
+    case "ollama": {
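For reference, the reviewer's "validate earlier" alternative could look like this sketch (a hypothetical helper; the point is matching the literal string, which works whether or not Provider.OLLAMA is defined):

import { env } from "@/env";

function assertOllamaConfigured(aiProvider: string | null): void {
  // The literal "ollama" check sidesteps the undefined Provider.OLLAMA case.
  if (aiProvider === "ollama" && !env.OLLAMA_MODEL) {
    throw new Error("OLLAMA_MODEL environment variable is not set");
  }
}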
@@ -343,6 +344,7 @@ function getProviderApiKey(provider: string) {
     [Provider.GROQ]: env.GROQ_API_KEY,
     [Provider.OPENROUTER]: env.OPENROUTER_API_KEY,
     [Provider.AI_GATEWAY]: env.AI_GATEWAY_API_KEY,
+    ...(Provider.OLLAMA ? { [Provider.OLLAMA]: "ollama-local" } : {}),
   };
 
   return providerApiKeys[provider];
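The "ollama-local" entry is a placeholder rather than a credential: a local Ollama daemon needs no API key, but callers of getProviderApiKey presumably treat a missing key as an unconfigured provider, so a truthy value keeps Ollama usable. A sketch of that assumed contract (the caller is hypothetical):

function assertConfigured(provider: string, apiKey: string | undefined): void {
  // Any truthy value passes; "ollama-local" exists only to satisfy this check.
  if (!apiKey) {
    throw new Error(`No API key configured for provider: ${provider}`);
  }
}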
2 changes: 1 addition & 1 deletion docs/hosting/environment-variables.md
@@ -60,7 +60,7 @@ cp apps/web/.env.example apps/web/.env
 | `BEDROCK_REGION` | No | AWS region for Bedrock | `us-west-2` |
 | **Ollama (Local LLM)** ||||
 | `OLLAMA_BASE_URL` | No | Ollama API endpoint (e.g., `http://localhost:11434/api`) | — |
-| `NEXT_PUBLIC_OLLAMA_MODEL` | No | Model to use with Ollama | — |
+| `OLLAMA_MODEL` | No | Model to use with Ollama (e.g., `llama3`) | — |
 | **Background Jobs (QStash)** ||||
 | `QSTASH_TOKEN` | No | QStash API token | — |
 | `QSTASH_CURRENT_SIGNING_KEY` | No | Current signing key for webhooks | — |
15 changes: 15 additions & 0 deletions pnpm-lock.yaml

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion turbo.json
@@ -42,6 +42,7 @@
       "OPENROUTER_API_KEY",
       "AI_GATEWAY_API_KEY",
       "OLLAMA_BASE_URL",
+      "OLLAMA_MODEL",
 
       "UPSTASH_REDIS_URL",
       "UPSTASH_REDIS_TOKEN",
@@ -126,7 +127,6 @@
       "NEXT_PUBLIC_WELCOME_UPGRADE_ENABLED",
       "NEXT_PUBLIC_AXIOM_DATASET",
       "NEXT_PUBLIC_AXIOM_TOKEN",
-      "NEXT_PUBLIC_OLLAMA_MODEL",
       "NEXT_PUBLIC_DUB_REFER_DOMAIN",
       "NEXT_PUBLIC_USE_AEONIK_FONT"
     ],
2 changes: 1 addition & 1 deletion version.txt
@@ -1 +1 @@
-v2.21.59
+v2.21.60