Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -140,6 +140,8 @@ pnpm install

Set the environment variables in the newly created `.env`. You can see a list of required variables in: `apps/web/env.ts`.

For a comprehensive reference of all environment variables, see the [Environment Variables Guide](docs/hosting/environment-variables.md).

The required environment variables:

- `AUTH_SECRET` -- can be any random string (try using `openssl rand -hex 32` for a quick secure random string)
Expand Down
2 changes: 1 addition & 1 deletion apps/unsubscriber/src/llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ export function getModel(provider: LLMProvider) {
case "openai":
return openai("gpt-4o-mini");
case "anthropic":
return anthropic("claude-3-7-sonnet-20250219");
return anthropic("claude-sonnet-4-5-20250929");
case "bedrock":
return bedrock("anthropic.claude-3-7-sonnet-20250219-v1:0");
Comment thread
elie222 marked this conversation as resolved.
default:
Expand Down
62 changes: 49 additions & 13 deletions apps/web/.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -18,30 +18,66 @@ GOOGLE_PUBSUB_VERIFICATION_TOKEN= # openssl rand -hex 32
# Outlook
MICROSOFT_CLIENT_ID=
MICROSOFT_CLIENT_SECRET=
MICROSOFT_WEBHOOK_CLIENT_STATE=
MICROSOFT_WEBHOOK_CLIENT_STATE= # openssl rand -hex 32

# Disable premium
NEXT_PUBLIC_BYPASS_PREMIUM_CHECKS=true

# LLM config
DEFAULT_LLM_PROVIDER=openrouter
DEFAULT_LLM_MODEL=anthropic/claude-sonnet-4.5

# Economy LLM configuration (for large context windows where cost efficiency matters)
ECONOMY_LLM_PROVIDER=openrouter
ECONOMY_LLM_MODEL=anthropic/claude-haiku-4.5

# Set at least one of the following:
OPENROUTER_API_KEY=
# AI_GATEWAY_API_KEY=
# =============================================================================
# LLM Configuration - Uncomment ONE provider block
# =============================================================================

# --- OpenRouter ---
# DEFAULT_LLM_PROVIDER=openrouter
# DEFAULT_LLM_MODEL=anthropic/claude-sonnet-4.5
# ECONOMY_LLM_PROVIDER=openrouter
# ECONOMY_LLM_MODEL=anthropic/claude-haiku-4.5
# OPENROUTER_API_KEY=

# --- Anthropic ---
# DEFAULT_LLM_PROVIDER=anthropic
# DEFAULT_LLM_MODEL=claude-sonnet-4-5-20250929
# ECONOMY_LLM_PROVIDER=anthropic
# ECONOMY_LLM_MODEL=claude-haiku-4-5-20251001
# ANTHROPIC_API_KEY=

# --- OpenAI ---
# DEFAULT_LLM_PROVIDER=openai
# DEFAULT_LLM_MODEL=gpt-4o
# ECONOMY_LLM_PROVIDER=openai
# ECONOMY_LLM_MODEL=gpt-4o-mini
# OPENAI_API_KEY=

# --- Google Vertex ---
# DEFAULT_LLM_PROVIDER=google
# DEFAULT_LLM_MODEL=gemini-2.5-pro
# ECONOMY_LLM_PROVIDER=google
# ECONOMY_LLM_MODEL=gemini-2.5-flash
# GOOGLE_API_KEY=

# --- Bedrock ---
# DEFAULT_LLM_PROVIDER=bedrock
# DEFAULT_LLM_MODEL=global.anthropic.claude-sonnet-4-5-20250929-v1:0
# ECONOMY_LLM_PROVIDER=bedrock
# ECONOMY_LLM_MODEL=global.anthropic.claude-haiku-4-5-20251001-v1:0
# BEDROCK_ACCESS_KEY=
# BEDROCK_SECRET_KEY=
# BEDROCK_REGION=us-west-2

# --- Vercel AI Gateway ---
# DEFAULT_LLM_PROVIDER=aigateway
# DEFAULT_LLM_MODEL=anthropic/claude-sonnet-4.5
# ECONOMY_LLM_PROVIDER=aigateway
# ECONOMY_LLM_MODEL=anthropic/claude-haiku-4.5
# AI_GATEWAY_API_KEY=

# --- Groq ---
# DEFAULT_LLM_PROVIDER=groq
# DEFAULT_LLM_MODEL=llama-3.3-70b-versatile
# ECONOMY_LLM_PROVIDER=groq
# ECONOMY_LLM_MODEL=llama-3.1-8b-instant
# GROQ_API_KEY=

INTERNAL_API_KEY= # openssl rand -hex 32
API_KEY_SALT= # openssl rand -hex 32

Expand Down
5 changes: 0 additions & 5 deletions apps/web/env.ts
Original file line number Diff line number Diff line change
Expand Up @@ -173,9 +173,6 @@ export const env = createEnv({
if (!value) return;
return value.split(",");
}),
NEXT_PUBLIC_BEDROCK_SONNET_MODEL: z
.string()
.default("us.anthropic.claude-3-7-sonnet-20250219-v1:0"),
NEXT_PUBLIC_OLLAMA_MODEL: z.string().optional(),
NEXT_PUBLIC_APP_HOME_PATH: z.string().default("/setup"),
NEXT_PUBLIC_DUB_REFER_DOMAIN: z.string().optional(),
Expand Down Expand Up @@ -235,8 +232,6 @@ export const env = createEnv({
NEXT_PUBLIC_AXIOM_DATASET: process.env.NEXT_PUBLIC_AXIOM_DATASET,
NEXT_PUBLIC_AXIOM_TOKEN: process.env.NEXT_PUBLIC_AXIOM_TOKEN,
NEXT_PUBLIC_LOG_SCOPES: process.env.NEXT_PUBLIC_LOG_SCOPES,
NEXT_PUBLIC_BEDROCK_SONNET_MODEL:
process.env.NEXT_PUBLIC_BEDROCK_SONNET_MODEL,
NEXT_PUBLIC_OLLAMA_MODEL: process.env.NEXT_PUBLIC_OLLAMA_MODEL,
NEXT_PUBLIC_APP_HOME_PATH: process.env.NEXT_PUBLIC_APP_HOME_PATH,
NEXT_PUBLIC_DUB_REFER_DOMAIN: process.env.NEXT_PUBLIC_DUB_REFER_DOMAIN,
Expand Down
23 changes: 1 addition & 22 deletions apps/web/utils/llms/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,35 +7,14 @@ export const DEFAULT_PROVIDER = "DEFAULT";
export const Provider = {
OPEN_AI: "openai",
ANTHROPIC: "anthropic",
BEDROCK: "bedrock",
GOOGLE: "google",
GROQ: "groq",
OPENROUTER: "openrouter",
AI_GATEWAY: "aigateway",
...(supportsOllama ? { OLLAMA: "ollama" } : {}),
};

export const Model = {
GPT_4O: "gpt-4o",
GPT_4O_MINI: "gpt-4o-mini",
CLAUDE_3_7_SONNET_BEDROCK: env.NEXT_PUBLIC_BEDROCK_SONNET_MODEL,
CLAUDE_4_SONNET_BEDROCK: "us.anthropic.claude-sonnet-4-20250514-v1:0",
CLAUDE_3_7_SONNET_ANTHROPIC: "claude-3-7-sonnet-20250219",
CLAUDE_3_5_SONNET_OPENROUTER: "anthropic/claude-3.5-sonnet",
CLAUDE_3_7_SONNET_OPENROUTER: "anthropic/claude-3.7-sonnet",
CLAUDE_4_SONNET_OPENROUTER: "anthropic/claude-sonnet-4",
CLAUDE_4_5_SONNET_OPENROUTER: "anthropic/claude-sonnet-4.5",
GEMINI_1_5_PRO: "gemini-1.5-pro-latest",
GEMINI_1_5_FLASH: "gemini-1.5-flash-latest",
GEMINI_2_0_FLASH_LITE: "gemini-2.0-flash-lite",
GEMINI_2_0_FLASH: "gemini-2.0-flash",
GEMINI_2_0_FLASH_OPENROUTER: "google/gemini-2.0-flash",
GEMINI_2_5_PRO_OPENROUTER: "google/gemini-2.5-pro",
GEMINI_3_PRO_PREVIEW_OPENROUTER: "google/gemini-3-pro-preview",
GROQ_LLAMA_3_3_70B: "llama-3.3-70b-versatile",
KIMI_K2_OPENROUTER: "moonshotai/kimi-k2",
...(supportsOllama ? { OLLAMA: env.NEXT_PUBLIC_OLLAMA_MODEL } : {}),
};

export const providerOptions: { label: string; value: string }[] = [
{ label: "Default", value: DEFAULT_PROVIDER },
{ label: "Anthropic", value: Provider.ANTHROPIC },
Expand Down
47 changes: 29 additions & 18 deletions apps/web/utils/llms/model.test.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { describe, it, expect, vi, beforeEach } from "vitest";
import { getModel } from "./model";
import { Provider, Model } from "./config";
import { Provider } from "./config";
import { env } from "@/env";
import type { UserAIFields } from "./types";

Expand Down Expand Up @@ -55,7 +55,6 @@ vi.mock("@/env", () => ({
BEDROCK_REGION: "us-west-2",
BEDROCK_ACCESS_KEY: "",
BEDROCK_SECRET_KEY: "",
NEXT_PUBLIC_BEDROCK_SONNET_MODEL: "anthropic.claude-3-sonnet-20240229-v1:0",
},
}));

Expand All @@ -73,6 +72,9 @@ describe("Models", () => {
beforeEach(() => {
vi.resetAllMocks();
vi.mocked(env).DEFAULT_LLM_PROVIDER = "openai";
vi.mocked(env).DEFAULT_LLM_MODEL = undefined;
vi.mocked(env).BEDROCK_ACCESS_KEY = "";
vi.mocked(env).BEDROCK_SECRET_KEY = "";
});

describe("getModel", () => {
Expand All @@ -92,12 +94,12 @@ describe("Models", () => {
const userAi: UserAIFields = {
aiApiKey: "user-api-key",
aiProvider: Provider.GOOGLE,
aiModel: Model.GEMINI_1_5_PRO,
aiModel: "gemini-1.5-pro-latest",
};

const result = getModel(userAi);
expect(result.provider).toBe(Provider.GOOGLE);
expect(result.modelName).toBe(Model.GEMINI_1_5_PRO);
expect(result.modelName).toBe("gemini-1.5-pro-latest");
});

it("should use user's API key with default provider when only API key is provided", () => {
Expand All @@ -116,38 +118,38 @@ describe("Models", () => {
const userAi: UserAIFields = {
aiApiKey: "user-api-key",
aiProvider: Provider.GOOGLE,
aiModel: Model.GEMINI_1_5_PRO,
aiModel: "gemini-1.5-pro-latest",
};

const result = getModel(userAi);
expect(result.provider).toBe(Provider.GOOGLE);
expect(result.modelName).toBe(Model.GEMINI_1_5_PRO);
expect(result.modelName).toBe("gemini-1.5-pro-latest");
expect(result.model).toBeDefined();
});

it("should configure Groq model correctly", () => {
const userAi: UserAIFields = {
aiApiKey: "user-api-key",
aiProvider: Provider.GROQ,
aiModel: Model.GROQ_LLAMA_3_3_70B,
aiModel: "llama-3.3-70b-versatile",
};

const result = getModel(userAi);
expect(result.provider).toBe(Provider.GROQ);
expect(result.modelName).toBe(Model.GROQ_LLAMA_3_3_70B);
expect(result.modelName).toBe("llama-3.3-70b-versatile");
expect(result.model).toBeDefined();
});

it("should configure OpenRouter model correctly", () => {
const userAi: UserAIFields = {
aiApiKey: "user-api-key",
aiProvider: Provider.OPENROUTER,
aiModel: Model.GROQ_LLAMA_3_3_70B,
aiModel: "llama-3.3-70b-versatile",
};

const result = getModel(userAi);
expect(result.provider).toBe(Provider.OPENROUTER);
expect(result.modelName).toBe(Model.GROQ_LLAMA_3_3_70B);
expect(result.modelName).toBe("llama-3.3-70b-versatile");
expect(result.model).toBeDefined();
});

Expand All @@ -168,31 +170,36 @@ describe("Models", () => {
const userAi: UserAIFields = {
aiApiKey: "user-api-key",
aiProvider: Provider.ANTHROPIC,
aiModel: Model.CLAUDE_3_7_SONNET_ANTHROPIC,
aiModel: "claude-3-7-sonnet-20250219",
};

vi.mocked(env).BEDROCK_ACCESS_KEY = "";
vi.mocked(env).BEDROCK_SECRET_KEY = "";

const result = getModel(userAi);
expect(result.provider).toBe(Provider.ANTHROPIC);
expect(result.modelName).toBe(Model.CLAUDE_3_7_SONNET_ANTHROPIC);
expect(result.modelName).toBe("claude-3-7-sonnet-20250219");
expect(result.model).toBeDefined();
});

it("should configure Anthropic model with Bedrock when Bedrock credentials exist", () => {
it("should configure Bedrock model correctly via env vars", () => {
const userAi: UserAIFields = {
aiApiKey: "user-api-key",
aiProvider: Provider.ANTHROPIC,
aiModel: Model.CLAUDE_3_7_SONNET_BEDROCK,
aiApiKey: null,
aiProvider: null,
aiModel: null,
};

vi.mocked(env).DEFAULT_LLM_PROVIDER = "bedrock";
vi.mocked(env).DEFAULT_LLM_MODEL =
"us.anthropic.claude-3-7-sonnet-20250219-v1:0";
vi.mocked(env).BEDROCK_ACCESS_KEY = "test-bedrock-key";
vi.mocked(env).BEDROCK_SECRET_KEY = "test-bedrock-secret";

const result = getModel(userAi);
expect(result.provider).toBe(Provider.ANTHROPIC);
expect(result.modelName).toBe(Model.CLAUDE_3_7_SONNET_BEDROCK);
expect(result.provider).toBe(Provider.BEDROCK);
expect(result.modelName).toBe(
"us.anthropic.claude-3-7-sonnet-20250219-v1:0",
);
expect(result.model).toBeDefined();
});

Expand Down Expand Up @@ -289,6 +296,10 @@ describe("Models", () => {
aiModel: null,
};

// Reset to default
vi.mocked(env).DEFAULT_LLM_PROVIDER = "openai";
vi.mocked(env).DEFAULT_LLM_MODEL = undefined;

const result = getModel(userAi, "default");
expect(result.provider).toBe(Provider.OPEN_AI);
expect(result.modelName).toBe("gpt-4o");
Expand Down
Loading
Loading