diff --git a/packages/types/src/global-settings.ts b/packages/types/src/global-settings.ts
index c53efcd4e683..f01e25c4d4fc 100644
--- a/packages/types/src/global-settings.ts
+++ b/packages/types/src/global-settings.ts
@@ -231,6 +231,7 @@ export const SECRET_STATE_KEYS = [
 	"doubaoApiKey",
 	"moonshotApiKey",
 	"mistralApiKey",
+	"minimaxApiKey",
 	"unboundApiKey",
 	"requestyApiKey",
 	"xaiApiKey",
diff --git a/packages/types/src/provider-settings.ts b/packages/types/src/provider-settings.ts
index f53d6b43acc0..e4e793afd145 100644
--- a/packages/types/src/provider-settings.ts
+++ b/packages/types/src/provider-settings.ts
@@ -24,6 +24,7 @@ import {
 	vscodeLlmModels,
 	xaiModels,
 	internationalZAiModels,
+	minimaxModels,
 } from "./providers/index.js"
 
 /**
@@ -132,6 +133,7 @@ export const providerNames = [
 	"groq",
 	"mistral",
 	"moonshot",
+	"minimax",
 	"openai-native",
 	"qwen-code",
 	"roo",
@@ -342,6 +344,13 @@ const moonshotSchema = apiModelIdProviderModelSchema.extend({
 	moonshotApiKey: z.string().optional(),
 })
 
+const minimaxSchema = apiModelIdProviderModelSchema.extend({
+	minimaxBaseUrl: z
+		.union([z.literal("https://api.minimax.io/v1"), z.literal("https://api.minimaxi.com/v1")])
+		.optional(),
+	minimaxApiKey: z.string().optional(),
+})
+
 const unboundSchema = baseProviderSettingsSchema.extend({
 	unboundApiKey: z.string().optional(),
 	unboundModelId: z.string().optional(),
@@ -451,6 +460,7 @@ export const providerSettingsSchemaDiscriminated = z.discriminatedUnion("apiProv
 	deepInfraSchema.merge(z.object({ apiProvider: z.literal("deepinfra") })),
 	doubaoSchema.merge(z.object({ apiProvider: z.literal("doubao") })),
 	moonshotSchema.merge(z.object({ apiProvider: z.literal("moonshot") })),
+	minimaxSchema.merge(z.object({ apiProvider: z.literal("minimax") })),
 	unboundSchema.merge(z.object({ apiProvider: z.literal("unbound") })),
 	requestySchema.merge(z.object({ apiProvider: z.literal("requesty") })),
 	humanRelaySchema.merge(z.object({ apiProvider: z.literal("human-relay") })),
@@ -493,6 +503,7 @@ export const providerSettingsSchema = z.object({
 	...deepInfraSchema.shape,
 	...doubaoSchema.shape,
 	...moonshotSchema.shape,
+	...minimaxSchema.shape,
 	...unboundSchema.shape,
 	...requestySchema.shape,
 	...humanRelaySchema.shape,
@@ -588,6 +599,7 @@ export const modelIdKeysByProvider: Record = {
 	"gemini-cli": "apiModelId",
 	mistral: "apiModelId",
 	moonshot: "apiModelId",
+	minimax: "apiModelId",
 	deepseek: "apiModelId",
 	deepinfra: "deepInfraModelId",
 	doubao: "apiModelId",
@@ -720,6 +732,11 @@ export const MODELS_BY_PROVIDER: Record<
 		label: "Moonshot",
 		models: Object.keys(moonshotModels),
 	},
+	minimax: {
+		id: "minimax",
+		label: "MiniMax",
+		models: Object.keys(minimaxModels),
+	},
 	"openai-native": {
 		id: "openai-native",
 		label: "OpenAI",
diff --git a/packages/types/src/providers/index.ts b/packages/types/src/providers/index.ts
index b2a0e60c59ed..e5eac72da6c2 100644
--- a/packages/types/src/providers/index.ts
+++ b/packages/types/src/providers/index.ts
@@ -33,3 +33,4 @@ export * from "./doubao.js"
 export * from "./vercel-ai-gateway.js"
 export * from "./zai.js"
 export * from "./deepinfra.js"
+export * from "./minimax.js"
diff --git a/packages/types/src/providers/minimax.ts b/packages/types/src/providers/minimax.ts
new file mode 100644
index 000000000000..825431b48242
--- /dev/null
+++ b/packages/types/src/providers/minimax.ts
@@ -0,0 +1,24 @@
+import type { ModelInfo } from "../model.js"
+
+// Minimax
+// https://www.minimax.io/platform/document/text_api_intro
+// https://www.minimax.io/platform/document/pricing
+export type MinimaxModelId = keyof typeof minimaxModels
+export const minimaxDefaultModelId: MinimaxModelId = "MiniMax-M2"
+
+export const minimaxModels = {
+	"MiniMax-M2": {
+		maxTokens: 16_384,
+		contextWindow: 192_000,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0.3,
+		outputPrice: 1.2,
+		cacheWritesPrice: 0,
+		cacheReadsPrice: 0,
+		description:
+			"MiniMax M2, a model born for Agents and code, featuring Top-tier Coding Capabilities, Powerful Agentic Performance, and Ultimate Cost-Effectiveness & Speed.",
+	},
+} as const satisfies Record<string, ModelInfo>
+
+export const MINIMAX_DEFAULT_TEMPERATURE = 1.0
diff --git a/src/api/index.ts b/src/api/index.ts
index 55dbcf693f33..ad6fbe6e201c 100644
--- a/src/api/index.ts
+++ b/src/api/index.ts
@@ -42,6 +42,7 @@ import {
 	FeatherlessHandler,
 	VercelAiGatewayHandler,
 	DeepInfraHandler,
+	MiniMaxHandler,
 } from "./providers"
 
 import { NativeOllamaHandler } from "./providers/native-ollama"
@@ -172,6 +173,8 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler {
 			return new FeatherlessHandler(options)
 		case "vercel-ai-gateway":
 			return new VercelAiGatewayHandler(options)
+		case "minimax":
+			return new MiniMaxHandler(options)
 		default:
 			apiProvider satisfies "gemini-cli" | undefined
 			return new AnthropicHandler(options)
diff --git a/src/api/providers/__tests__/minimax.spec.ts b/src/api/providers/__tests__/minimax.spec.ts
new file mode 100644
index 000000000000..c488aea8812d
--- /dev/null
+++ b/src/api/providers/__tests__/minimax.spec.ts
@@ -0,0 +1,314 @@
+// npx vitest run src/api/providers/__tests__/minimax.spec.ts
+
+vitest.mock("vscode", () => ({
+	workspace: {
+		getConfiguration: vitest.fn().mockReturnValue({
+			get: vitest.fn().mockReturnValue(600), // Default timeout in seconds
+		}),
+	},
+}))
+
+import OpenAI from "openai"
+import { Anthropic } from "@anthropic-ai/sdk"
+
+import { type MinimaxModelId, minimaxDefaultModelId, minimaxModels } from "@roo-code/types"
+
+import { MiniMaxHandler } from "../minimax"
+
+vitest.mock("openai", () => {
+	const createMock = vitest.fn()
+	return {
+		default: vitest.fn(() => ({ chat: { completions: { create: createMock } } })),
+	}
+})
+
+describe("MiniMaxHandler", () => {
+	let handler: MiniMaxHandler
+	let mockCreate: any
+
+	beforeEach(() => {
+		vitest.clearAllMocks()
+		mockCreate = (OpenAI as unknown as any)().chat.completions.create
+	})
+
+	describe("International MiniMax (default)", () => {
+		beforeEach(() => {
+			handler = new MiniMaxHandler({
+				minimaxApiKey: "test-minimax-api-key",
+				minimaxBaseUrl: "https://api.minimax.io/v1",
+			})
+		})
+
+		it("should use the correct international MiniMax base URL by default", () => {
+			new MiniMaxHandler({ minimaxApiKey: "test-minimax-api-key" })
+			expect(OpenAI).toHaveBeenCalledWith(
+				expect.objectContaining({
+					baseURL: "https://api.minimax.io/v1",
+				}),
+			)
+		})
+
+		it("should use the provided API key", () => {
+			const minimaxApiKey = "test-minimax-api-key"
+			new MiniMaxHandler({ minimaxApiKey })
+			expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ apiKey: minimaxApiKey }))
+		})
+
+		it("should return default model when no model is specified", () => {
+			const model = handler.getModel()
+			expect(model.id).toBe(minimaxDefaultModelId)
+			expect(model.info).toEqual(minimaxModels[minimaxDefaultModelId])
+		})
+
+		it("should return specified model when valid model is provided", () => {
+			const testModelId: MinimaxModelId = "MiniMax-M2"
+			const handlerWithModel = new MiniMaxHandler({
+				apiModelId: testModelId,
"test-minimax-api-key", + }) + const model = handlerWithModel.getModel() + expect(model.id).toBe(testModelId) + expect(model.info).toEqual(minimaxModels[testModelId]) + }) + + it("should return MiniMax-M2 model with correct configuration", () => { + const testModelId: MinimaxModelId = "MiniMax-M2" + const handlerWithModel = new MiniMaxHandler({ + apiModelId: testModelId, + minimaxApiKey: "test-minimax-api-key", + }) + const model = handlerWithModel.getModel() + expect(model.id).toBe(testModelId) + expect(model.info).toEqual(minimaxModels[testModelId]) + expect(model.info.contextWindow).toBe(192_000) + expect(model.info.maxTokens).toBe(16_384) + expect(model.info.supportsPromptCache).toBe(false) + }) + }) + + describe("China MiniMax", () => { + beforeEach(() => { + handler = new MiniMaxHandler({ + minimaxApiKey: "test-minimax-api-key", + minimaxBaseUrl: "https://api.minimaxi.com/v1", + }) + }) + + it("should use the correct China MiniMax base URL", () => { + new MiniMaxHandler({ + minimaxApiKey: "test-minimax-api-key", + minimaxBaseUrl: "https://api.minimaxi.com/v1", + }) + expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ baseURL: "https://api.minimaxi.com/v1" })) + }) + + it("should use the provided API key for China", () => { + const minimaxApiKey = "test-minimax-api-key" + new MiniMaxHandler({ minimaxApiKey, minimaxBaseUrl: "https://api.minimaxi.com/v1" }) + expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ apiKey: minimaxApiKey })) + }) + + it("should return default model when no model is specified", () => { + const model = handler.getModel() + expect(model.id).toBe(minimaxDefaultModelId) + expect(model.info).toEqual(minimaxModels[minimaxDefaultModelId]) + }) + }) + + describe("Default behavior", () => { + it("should default to international base URL when none is specified", () => { + const handlerDefault = new MiniMaxHandler({ minimaxApiKey: "test-minimax-api-key" }) + expect(OpenAI).toHaveBeenCalledWith( + expect.objectContaining({ + baseURL: "https://api.minimax.io/v1", + }), + ) + + const model = handlerDefault.getModel() + expect(model.id).toBe(minimaxDefaultModelId) + expect(model.info).toEqual(minimaxModels[minimaxDefaultModelId]) + }) + + it("should default to MiniMax-M2 model", () => { + const handlerDefault = new MiniMaxHandler({ minimaxApiKey: "test-minimax-api-key" }) + const model = handlerDefault.getModel() + expect(model.id).toBe("MiniMax-M2") + }) + }) + + describe("API Methods", () => { + beforeEach(() => { + handler = new MiniMaxHandler({ minimaxApiKey: "test-minimax-api-key" }) + }) + + it("completePrompt method should return text from MiniMax API", async () => { + const expectedResponse = "This is a test response from MiniMax" + mockCreate.mockResolvedValueOnce({ choices: [{ message: { content: expectedResponse } }] }) + const result = await handler.completePrompt("test prompt") + expect(result).toBe(expectedResponse) + }) + + it("should handle errors in completePrompt", async () => { + const errorMessage = "MiniMax API error" + mockCreate.mockRejectedValueOnce(new Error(errorMessage)) + await expect(handler.completePrompt("test prompt")).rejects.toThrow() + }) + + it("createMessage should yield text content from stream", async () => { + const testContent = "This is test content from MiniMax stream" + + mockCreate.mockImplementationOnce(() => { + return { + [Symbol.asyncIterator]: () => ({ + next: vitest + .fn() + .mockResolvedValueOnce({ + done: false, + value: { choices: [{ delta: { content: testContent } }] }, + }) + 
+							.mockResolvedValueOnce({ done: true }),
+					}),
+				}
+			})
+
+			const stream = handler.createMessage("system prompt", [])
+			const firstChunk = await stream.next()
+
+			expect(firstChunk.done).toBe(false)
+			expect(firstChunk.value).toEqual({ type: "text", text: testContent })
+		})
+
+		it("should handle reasoning tags (<think>) from stream", async () => {
+			mockCreate.mockImplementationOnce(() => {
+				return {
+					[Symbol.asyncIterator]: () => ({
+						next: vitest
+							.fn()
+							.mockResolvedValueOnce({
+								done: false,
+								value: { choices: [{ delta: { content: "<think>Let me think" } }] },
+							})
+							.mockResolvedValueOnce({
+								done: false,
+								value: { choices: [{ delta: { content: " about this" } }] },
+							})
+							.mockResolvedValueOnce({
+								done: false,
+								value: { choices: [{ delta: { content: "</think>The answer is 42" } }] },
+							})
+							.mockResolvedValueOnce({ done: true }),
+					}),
+				}
+			})
+
+			const stream = handler.createMessage("system prompt", [])
+			const chunks = []
+			for await (const chunk of stream) {
+				chunks.push(chunk)
+			}
+
+			// XmlMatcher yields chunks as they're processed
+			expect(chunks).toEqual([
+				{ type: "reasoning", text: "Let me think" },
+				{ type: "reasoning", text: " about this" },
+				{ type: "text", text: "The answer is 42" },
+			])
+		})
+
+		it("createMessage should yield usage data from stream", async () => {
+			mockCreate.mockImplementationOnce(() => {
+				return {
+					[Symbol.asyncIterator]: () => ({
+						next: vitest
+							.fn()
+							.mockResolvedValueOnce({
+								done: false,
+								value: {
+									choices: [{ delta: {} }],
+									usage: { prompt_tokens: 10, completion_tokens: 20 },
+								},
+							})
+							.mockResolvedValueOnce({ done: true }),
+					}),
+				}
+			})
+
+			const stream = handler.createMessage("system prompt", [])
+			const firstChunk = await stream.next()
+
+			expect(firstChunk.done).toBe(false)
+			expect(firstChunk.value).toEqual({ type: "usage", inputTokens: 10, outputTokens: 20 })
+		})
+
+		it("createMessage should pass correct parameters to MiniMax client", async () => {
+			const modelId: MinimaxModelId = "MiniMax-M2"
+			const modelInfo = minimaxModels[modelId]
+			const handlerWithModel = new MiniMaxHandler({
+				apiModelId: modelId,
+				minimaxApiKey: "test-minimax-api-key",
+			})
+
+			mockCreate.mockImplementationOnce(() => {
+				return {
+					[Symbol.asyncIterator]: () => ({
+						async next() {
+							return { done: true }
+						},
+					}),
+				}
+			})
+
+			const systemPrompt = "Test system prompt for MiniMax"
+			const messages: Anthropic.Messages.MessageParam[] = [{ role: "user", content: "Test message for MiniMax" }]
+
+			const messageGenerator = handlerWithModel.createMessage(systemPrompt, messages)
+			await messageGenerator.next()
+
+			expect(mockCreate).toHaveBeenCalledWith(
+				expect.objectContaining({
+					model: modelId,
+					max_tokens: Math.min(modelInfo.maxTokens, Math.ceil(modelInfo.contextWindow * 0.2)),
+					temperature: 1,
+					messages: expect.arrayContaining([{ role: "system", content: systemPrompt }]),
+					stream: true,
+					stream_options: { include_usage: true },
+				}),
+				undefined,
+			)
+		})
+
+		it("should use temperature 1 by default", async () => {
+			mockCreate.mockImplementationOnce(() => {
+				return {
+					[Symbol.asyncIterator]: () => ({
+						async next() {
+							return { done: true }
+						},
+					}),
+				}
+			})
+
+			const messageGenerator = handler.createMessage("test", [])
+			await messageGenerator.next()
+
+			expect(mockCreate).toHaveBeenCalledWith(
+				expect.objectContaining({
+					temperature: 1,
+				}),
+				undefined,
+			)
+		})
+	})
+
+	describe("Model Configuration", () => {
+		it("should correctly configure MiniMax-M2 model properties", () => {
+			const model = minimaxModels["MiniMax-M2"]
+			expect(model.maxTokens).toBe(16_384)
+			expect(model.contextWindow).toBe(192_000)
+			expect(model.supportsImages).toBe(false)
+			expect(model.supportsPromptCache).toBe(false)
+			expect(model.inputPrice).toBe(0.3)
+			expect(model.outputPrice).toBe(1.2)
+		})
+	})
+})
diff --git a/src/api/providers/index.ts b/src/api/providers/index.ts
index 86cda7cf1301..de20971f24b1 100644
--- a/src/api/providers/index.ts
+++ b/src/api/providers/index.ts
@@ -36,3 +36,4 @@ export { RooHandler } from "./roo"
 export { FeatherlessHandler } from "./featherless"
 export { VercelAiGatewayHandler } from "./vercel-ai-gateway"
 export { DeepInfraHandler } from "./deepinfra"
+export { MiniMaxHandler } from "./minimax"
diff --git a/src/api/providers/minimax.ts b/src/api/providers/minimax.ts
new file mode 100644
index 000000000000..23722f597648
--- /dev/null
+++ b/src/api/providers/minimax.ts
@@ -0,0 +1,62 @@
+import { Anthropic } from "@anthropic-ai/sdk"
+import { type MinimaxModelId, minimaxDefaultModelId, minimaxModels } from "@roo-code/types"
+
+import type { ApiHandlerOptions } from "../../shared/api"
+import { XmlMatcher } from "../../utils/xml-matcher"
+import { ApiStream } from "../transform/stream"
+import type { ApiHandlerCreateMessageMetadata } from "../index"
+
+import { BaseOpenAiCompatibleProvider } from "./base-openai-compatible-provider"
+
+export class MiniMaxHandler extends BaseOpenAiCompatibleProvider<MinimaxModelId> {
+	constructor(options: ApiHandlerOptions) {
+		super({
+			...options,
+			providerName: "MiniMax",
+			baseURL: options.minimaxBaseUrl ?? "https://api.minimax.io/v1",
+			apiKey: options.minimaxApiKey,
+			defaultProviderModelId: minimaxDefaultModelId,
+			providerModels: minimaxModels,
+			defaultTemperature: 1.0,
+		})
+	}
+
+	override async *createMessage(
+		systemPrompt: string,
+		messages: Anthropic.Messages.MessageParam[],
+		metadata?: ApiHandlerCreateMessageMetadata,
+	): ApiStream {
+		const stream = await this.createStream(systemPrompt, messages, metadata)
+
+		const matcher = new XmlMatcher(
+			"think",
+			(chunk) =>
+				({
+					type: chunk.matched ? "reasoning" : "text",
"reasoning" : "text", + text: chunk.data, + }) as const, + ) + + for await (const chunk of stream) { + const delta = chunk.choices[0]?.delta + + if (delta?.content) { + for (const matcherChunk of matcher.update(delta.content)) { + yield matcherChunk + } + } + + if (chunk.usage) { + yield { + type: "usage", + inputTokens: chunk.usage.prompt_tokens || 0, + outputTokens: chunk.usage.completion_tokens || 0, + } + } + } + + for (const chunk of matcher.final()) { + yield chunk + } + } +} diff --git a/webview-ui/src/components/settings/ApiOptions.tsx b/webview-ui/src/components/settings/ApiOptions.tsx index 5a35cdfca5a6..bd617bae10fc 100644 --- a/webview-ui/src/components/settings/ApiOptions.tsx +++ b/webview-ui/src/components/settings/ApiOptions.tsx @@ -39,6 +39,7 @@ import { rooDefaultModelId, vercelAiGatewayDefaultModelId, deepInfraDefaultModelId, + minimaxDefaultModelId, } from "@roo-code/types" import { vscode } from "@src/utils/vscode" @@ -101,6 +102,7 @@ import { VercelAiGateway, DeepInfra, GeminiCli, + MiniMax, } from "./providers" import { MODELS_BY_PROVIDER, PROVIDERS } from "./constants" @@ -364,6 +366,7 @@ const ApiOptions = ({ deepseek: { field: "apiModelId", default: deepSeekDefaultModelId }, doubao: { field: "apiModelId", default: doubaoDefaultModelId }, moonshot: { field: "apiModelId", default: moonshotDefaultModelId }, + minimax: { field: "apiModelId", default: minimaxDefaultModelId }, mistral: { field: "apiModelId", default: mistralDefaultModelId }, xai: { field: "apiModelId", default: xaiDefaultModelId }, groq: { field: "apiModelId", default: groqDefaultModelId }, @@ -655,6 +658,10 @@ const ApiOptions = ({ )} + {selectedProvider === "minimax" && ( + + )} + {selectedProvider === "vscode-lm" && ( )} diff --git a/webview-ui/src/components/settings/constants.ts b/webview-ui/src/components/settings/constants.ts index 0fe58d7fc257..315ea1ac95b7 100644 --- a/webview-ui/src/components/settings/constants.ts +++ b/webview-ui/src/components/settings/constants.ts @@ -21,6 +21,7 @@ import { internationalZAiModels, fireworksModels, featherlessModels, + minimaxModels, } from "@roo-code/types" export const MODELS_BY_PROVIDER: Partial>> = { @@ -44,6 +45,7 @@ export const MODELS_BY_PROVIDER: Partial a.label.localeCompare(b.label)) diff --git a/webview-ui/src/components/settings/providers/MiniMax.tsx b/webview-ui/src/components/settings/providers/MiniMax.tsx new file mode 100644 index 000000000000..4055be7d1791 --- /dev/null +++ b/webview-ui/src/components/settings/providers/MiniMax.tsx @@ -0,0 +1,73 @@ +import { useCallback } from "react" +import { VSCodeTextField, VSCodeDropdown, VSCodeOption } from "@vscode/webview-ui-toolkit/react" + +import type { ProviderSettings } from "@roo-code/types" + +import { useAppTranslation } from "@src/i18n/TranslationContext" +import { VSCodeButtonLink } from "@src/components/common/VSCodeButtonLink" + +import { inputEventTransform } from "../transforms" +import { cn } from "@/lib/utils" + +type MiniMaxProps = { + apiConfiguration: ProviderSettings + setApiConfigurationField: (field: keyof ProviderSettings, value: ProviderSettings[keyof ProviderSettings]) => void +} + +export const MiniMax = ({ apiConfiguration, setApiConfigurationField }: MiniMaxProps) => { + const { t } = useAppTranslation() + + const handleInputChange = useCallback( + ( + field: K, + transform: (event: E) => ProviderSettings[K] = inputEventTransform, + ) => + (event: E | Event) => { + setApiConfigurationField(field, transform(event as E)) + }, + [setApiConfigurationField], + ) + + return ( + 
+		<>
+			<div>
+				<label className="block font-medium mb-1">{t("settings:providers.minimaxBaseUrl")}</label>
+				<VSCodeDropdown
+					value={apiConfiguration?.minimaxBaseUrl || "https://api.minimax.io/v1"}
+					onChange={handleInputChange("minimaxBaseUrl")}
+					className={cn("w-full")}>
+					<VSCodeOption value="https://api.minimax.io/v1" className="p-2">
+						api.minimax.io
+					</VSCodeOption>
+					<VSCodeOption value="https://api.minimaxi.com/v1" className="p-2">
+						api.minimaxi.com
+					</VSCodeOption>
+				</VSCodeDropdown>
+			</div>
+			<VSCodeTextField
+				value={apiConfiguration?.minimaxApiKey || ""}
+				type="password"
+				onInput={handleInputChange("minimaxApiKey")}
+				placeholder={t("settings:placeholders.apiKey")}
+				className="w-full">
+				<label className="block font-medium mb-1">{t("settings:providers.minimaxApiKey")}</label>
+			</VSCodeTextField>
+			<div className="text-sm text-vscode-descriptionForeground -mt-2">
+				{t("settings:providers.apiKeyStorageNotice")}
+			</div>
+			{!apiConfiguration?.minimaxApiKey && (
+				<VSCodeButtonLink href="https://www.minimax.io/platform" appearance="secondary">
+					{t("settings:providers.getMiniMaxApiKey")}
+				</VSCodeButtonLink>
+			)}
+		</>
+	)
+}
diff --git a/webview-ui/src/components/settings/providers/index.ts b/webview-ui/src/components/settings/providers/index.ts
index 6e677a87825e..3969ba76c196 100644
--- a/webview-ui/src/components/settings/providers/index.ts
+++ b/webview-ui/src/components/settings/providers/index.ts
@@ -33,3 +33,4 @@ export { Fireworks } from "./Fireworks"
 export { Featherless } from "./Featherless"
 export { VercelAiGateway } from "./VercelAiGateway"
 export { DeepInfra } from "./DeepInfra"
+export { MiniMax } from "./MiniMax"
diff --git a/webview-ui/src/components/ui/hooks/useSelectedModel.ts b/webview-ui/src/components/ui/hooks/useSelectedModel.ts
index 1cc8f507bb3b..7e0cda9ef2cf 100644
--- a/webview-ui/src/components/ui/hooks/useSelectedModel.ts
+++ b/webview-ui/src/components/ui/hooks/useSelectedModel.ts
@@ -12,6 +12,8 @@ import {
 	deepSeekModels,
 	moonshotDefaultModelId,
 	moonshotModels,
+	minimaxDefaultModelId,
+	minimaxModels,
 	geminiDefaultModelId,
 	geminiModels,
 	mistralDefaultModelId,
@@ -272,6 +274,11 @@ function getSelectedModel({
 			const info = moonshotModels[id as keyof typeof moonshotModels]
 			return { id, info }
 		}
+		case "minimax": {
+			const id = apiConfiguration.apiModelId ?? minimaxDefaultModelId
+			const info = minimaxModels[id as keyof typeof minimaxModels]
+			return { id, info }
+		}
 		case "zai": {
 			const isChina = apiConfiguration.zaiApiLine === "china_coding"
 			const models = isChina ? mainlandZAiModels : internationalZAiModels
diff --git a/webview-ui/src/i18n/locales/en/settings.json b/webview-ui/src/i18n/locales/en/settings.json
index 8ac41b14df0e..c6692fb15b62 100644
--- a/webview-ui/src/i18n/locales/en/settings.json
+++ b/webview-ui/src/i18n/locales/en/settings.json
@@ -292,6 +292,9 @@
 		"moonshotApiKey": "Moonshot API Key",
 		"getMoonshotApiKey": "Get Moonshot API Key",
 		"moonshotBaseUrl": "Moonshot Entrypoint",
+		"minimaxApiKey": "MiniMax API Key",
+		"getMiniMaxApiKey": "Get MiniMax API Key",
+		"minimaxBaseUrl": "MiniMax Entrypoint",
 		"zaiApiKey": "Z AI API Key",
 		"getZaiApiKey": "Get Z AI API Key",
 		"zaiEntrypoint": "Z AI Entrypoint",
diff --git a/webview-ui/src/i18n/locales/zh-CN/settings.json b/webview-ui/src/i18n/locales/zh-CN/settings.json
index 1b84341ba6c5..9573ed4a2c37 100644
--- a/webview-ui/src/i18n/locales/zh-CN/settings.json
+++ b/webview-ui/src/i18n/locales/zh-CN/settings.json
@@ -293,6 +293,9 @@
 		"moonshotApiKey": "Moonshot API 密钥",
 		"getMoonshotApiKey": "获取 Moonshot API 密钥",
 		"moonshotBaseUrl": "Moonshot 服务站点",
+		"minimaxApiKey": "MiniMax API 密钥",
+		"getMiniMaxApiKey": "获取 MiniMax API 密钥",
+		"minimaxBaseUrl": "MiniMax 服务站点",
 		"zaiApiKey": "Z AI API 密钥",
 		"getZaiApiKey": "获取 Z AI API 密钥",
 		"zaiEntrypoint": "Z AI 服务站点",
diff --git a/webview-ui/src/i18n/locales/zh-TW/settings.json b/webview-ui/src/i18n/locales/zh-TW/settings.json
index c667809244a7..c0e524c24051 100644
--- a/webview-ui/src/i18n/locales/zh-TW/settings.json
+++ b/webview-ui/src/i18n/locales/zh-TW/settings.json
@@ -297,6 +297,9 @@
 		"getZaiApiKey": "取得 Z AI API 金鑰",
 		"zaiEntrypoint": "Z AI 服務端點",
 		"zaiEntrypointDescription": "請根據您的位置選擇適當的 API 服務端點。如果您在中國,請選擇 open.bigmodel.cn。否則,請選擇 api.z.ai。",
+		"minimaxApiKey": "MiniMax API 金鑰",
+		"getMiniMaxApiKey": "取得 MiniMax API 金鑰",
+		"minimaxBaseUrl": "MiniMax 服務端點",
 		"geminiApiKey": "Gemini API 金鑰",
 		"getGroqApiKey": "取得 Groq API 金鑰",
 		"groqApiKey": "Groq API 金鑰",
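
Usage sketch (not part of the patch): once a settings profile selects the new provider, buildApiHandler returns a MiniMaxHandler that streams reasoning, text, and usage chunks. The import path, environment variable, and call site below are assumptions for illustration; the field names and defaults come from the diff above.

// Illustrative only; adjust the relative import to wherever src/api/index.ts resolves from.
import { buildApiHandler } from "../api"

const handler = buildApiHandler({
	apiProvider: "minimax",
	apiModelId: "MiniMax-M2",
	minimaxApiKey: process.env.MINIMAX_API_KEY, // assumed env var
	// Omit minimaxBaseUrl to use https://api.minimax.io/v1, or set
	// "https://api.minimaxi.com/v1" for the China endpoint.
})

// <think>...</think> spans arrive as "reasoning" chunks, other content as "text",
// and token counts arrive as a "usage" chunk, mirroring the handler's XmlMatcher logic.
for await (const chunk of handler.createMessage("You are a helpful assistant.", [
	{ role: "user", content: "Hello" },
])) {
	console.log(chunk)
}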