-
Notifications
You must be signed in to change notification settings - Fork 2.5k
Add Z AI provider #6657
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged
Merged
Add Z AI provider #6657
Changes from all commits
Commits
Show all changes
3 commits
Select commit
Hold shift + click to select a range
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,105 @@ | ||
| import type { ModelInfo } from "../model.js" | ||
|
|
||
| // Z AI | ||
| // https://docs.z.ai/guides/llm/glm-4.5 | ||
| // https://docs.z.ai/guides/overview/pricing | ||
jues marked this conversation as resolved.
Show resolved
Hide resolved
|
||
|
|
||
// Model ids available on the international Z AI endpoint (api.z.ai).
export type InternationalZAiModelId = keyof typeof internationalZAiModels
// Model selected when the caller does not specify an apiModelId.
export const internationalZAiDefaultModelId: InternationalZAiModelId = "glm-4.5"
// Static metadata for the international line. Prices are assumed to be USD
// per 1M tokens — TODO confirm against https://docs.z.ai/guides/overview/pricing.
// `satisfies` keeps the literal keys (for the id union above) while still
// checking every entry against ModelInfo.
export const internationalZAiModels = {
	"glm-4.5": {
		maxTokens: 98_304,
		contextWindow: 131_072,
		supportsImages: false,
		supportsPromptCache: true,
		inputPrice: 0.6,
		outputPrice: 2.2,
		// Cache writes are free on this endpoint; only cache reads are billed.
		cacheWritesPrice: 0,
		cacheReadsPrice: 0.11,
		description:
			"GLM-4.5 is Zhipu's latest featured model. Its comprehensive capabilities in reasoning, coding, and agent reach the state-of-the-art (SOTA) level among open-source models, with a context length of up to 128k.",
	},
	"glm-4.5-air": {
		maxTokens: 98_304,
		contextWindow: 131_072,
		supportsImages: false,
		supportsPromptCache: true,
		inputPrice: 0.2,
		outputPrice: 1.1,
		cacheWritesPrice: 0,
		cacheReadsPrice: 0.03,
		description:
			"GLM-4.5-Air is the lightweight version of GLM-4.5. It balances performance and cost-effectiveness, and can flexibly switch to hybrid thinking models.",
	},
} as const satisfies Record<string, ModelInfo>
|
|
||
// Model ids available on the mainland-China Z AI endpoint (open.bigmodel.cn).
export type MainlandZAiModelId = keyof typeof mainlandZAiModels
// Model selected when the caller does not specify an apiModelId.
export const mainlandZAiDefaultModelId: MainlandZAiModelId = "glm-4.5"
// Static metadata for the mainland line. The top-level prices are the
// defaults; `tiers` overrides them by prompt size — requests that fit in a
// smaller context window are billed at a lower rate, and the Infinity tier
// catches anything above 128k. Prices assumed USD per 1M tokens — TODO
// confirm against https://docs.z.ai/guides/overview/pricing.
export const mainlandZAiModels = {
	"glm-4.5": {
		maxTokens: 98_304,
		contextWindow: 131_072,
		supportsImages: false,
		supportsPromptCache: true,
		inputPrice: 0.29,
		outputPrice: 1.14,
		// Cache writes are free; only cache reads are billed.
		cacheWritesPrice: 0,
		cacheReadsPrice: 0.057,
		description:
			"GLM-4.5 is Zhipu's latest featured model. Its comprehensive capabilities in reasoning, coding, and agent reach the state-of-the-art (SOTA) level among open-source models, with a context length of up to 128k.",
		tiers: [
			{
				// Cheapest tier: prompts up to 32k tokens.
				contextWindow: 32_000,
				inputPrice: 0.21,
				outputPrice: 1.0,
				cacheReadsPrice: 0.043,
			},
			{
				contextWindow: 128_000,
				inputPrice: 0.29,
				outputPrice: 1.14,
				cacheReadsPrice: 0.057,
			},
			{
				// Fallback tier for anything beyond 128k.
				contextWindow: Infinity,
				inputPrice: 0.29,
				outputPrice: 1.14,
				cacheReadsPrice: 0.057,
			},
		],
	},
	"glm-4.5-air": {
		maxTokens: 98_304,
		contextWindow: 131_072,
		supportsImages: false,
		supportsPromptCache: true,
		inputPrice: 0.1,
		outputPrice: 0.6,
		cacheWritesPrice: 0,
		cacheReadsPrice: 0.02,
		description:
			"GLM-4.5-Air is the lightweight version of GLM-4.5. It balances performance and cost-effectiveness, and can flexibly switch to hybrid thinking models.",
		tiers: [
			{
				// Cheapest tier: prompts up to 32k tokens.
				contextWindow: 32_000,
				inputPrice: 0.07,
				outputPrice: 0.4,
				cacheReadsPrice: 0.014,
			},
			{
				contextWindow: 128_000,
				inputPrice: 0.1,
				outputPrice: 0.6,
				cacheReadsPrice: 0.02,
			},
			{
				// Fallback tier for anything beyond 128k.
				contextWindow: Infinity,
				inputPrice: 0.1,
				outputPrice: 0.6,
				cacheReadsPrice: 0.02,
			},
		],
	},
} as const satisfies Record<string, ModelInfo>
|
|
||
// Default sampling temperature passed to the Z AI chat-completions API.
export const ZAI_DEFAULT_TEMPERATURE = 0
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,231 @@ | ||
// npx vitest run src/api/providers/__tests__/zai.spec.ts

// Mock vscode first to avoid import errors
vitest.mock("vscode", () => ({}))

import OpenAI from "openai"
import { Anthropic } from "@anthropic-ai/sdk"

import {
	type InternationalZAiModelId,
	type MainlandZAiModelId,
	internationalZAiDefaultModelId,
	mainlandZAiDefaultModelId,
	internationalZAiModels,
	mainlandZAiModels,
	ZAI_DEFAULT_TEMPERATURE,
} from "@roo-code/types"

import { ZAiHandler } from "../zai"

// Stub the OpenAI SDK. `createMock` is a single closure shared by every
// constructed client, so tests can retrieve it from any `new OpenAI()`
// instance and assert/program the chat.completions.create calls.
vitest.mock("openai", () => {
	const createMock = vitest.fn()
	return {
		default: vitest.fn(() => ({ chat: { completions: { create: createMock } } })),
	}
})
|
|
||
// Test suite for ZAiHandler: endpoint selection (international vs. mainland
// China), model resolution, and the OpenAI-compatible request/stream plumbing.
describe("ZAiHandler", () => {
	let handler: ZAiHandler
	let mockCreate: any

	beforeEach(() => {
		vitest.clearAllMocks()
		// The mocked OpenAI constructor hands back the shared `create` mock
		// (see the vitest.mock("openai") factory above), so calling it here
		// yields the same spy the handler will invoke.
		mockCreate = (OpenAI as unknown as any)().chat.completions.create
	})

	// zaiApiLine: "international" should route to api.z.ai and use the
	// international model table.
	describe("International Z AI", () => {
		beforeEach(() => {
			handler = new ZAiHandler({ zaiApiKey: "test-zai-api-key", zaiApiLine: "international" })
		})

		it("should use the correct international Z AI base URL", () => {
			new ZAiHandler({ zaiApiKey: "test-zai-api-key", zaiApiLine: "international" })
			expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ baseURL: "https://api.z.ai/api/paas/v4" }))
		})

		it("should use the provided API key for international", () => {
			const zaiApiKey = "test-zai-api-key"
			new ZAiHandler({ zaiApiKey, zaiApiLine: "international" })
			expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ apiKey: zaiApiKey }))
		})

		it("should return international default model when no model is specified", () => {
			const model = handler.getModel()
			expect(model.id).toBe(internationalZAiDefaultModelId)
			expect(model.info).toEqual(internationalZAiModels[internationalZAiDefaultModelId])
		})

		it("should return specified international model when valid model is provided", () => {
			const testModelId: InternationalZAiModelId = "glm-4.5-air"
			const handlerWithModel = new ZAiHandler({
				apiModelId: testModelId,
				zaiApiKey: "test-zai-api-key",
				zaiApiLine: "international",
			})
			const model = handlerWithModel.getModel()
			expect(model.id).toBe(testModelId)
			expect(model.info).toEqual(internationalZAiModels[testModelId])
		})
	})

	// zaiApiLine: "china" should route to open.bigmodel.cn and use the
	// mainland model table.
	describe("China Z AI", () => {
		beforeEach(() => {
			handler = new ZAiHandler({ zaiApiKey: "test-zai-api-key", zaiApiLine: "china" })
		})

		it("should use the correct China Z AI base URL", () => {
			new ZAiHandler({ zaiApiKey: "test-zai-api-key", zaiApiLine: "china" })
			expect(OpenAI).toHaveBeenCalledWith(
				expect.objectContaining({ baseURL: "https://open.bigmodel.cn/api/paas/v4" }),
			)
		})

		it("should use the provided API key for China", () => {
			const zaiApiKey = "test-zai-api-key"
			new ZAiHandler({ zaiApiKey, zaiApiLine: "china" })
			expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ apiKey: zaiApiKey }))
		})

		it("should return China default model when no model is specified", () => {
			const model = handler.getModel()
			expect(model.id).toBe(mainlandZAiDefaultModelId)
			expect(model.info).toEqual(mainlandZAiModels[mainlandZAiDefaultModelId])
		})

		it("should return specified China model when valid model is provided", () => {
			const testModelId: MainlandZAiModelId = "glm-4.5-air"
			const handlerWithModel = new ZAiHandler({
				apiModelId: testModelId,
				zaiApiKey: "test-zai-api-key",
				zaiApiLine: "china",
			})
			const model = handlerWithModel.getModel()
			expect(model.id).toBe(testModelId)
			expect(model.info).toEqual(mainlandZAiModels[testModelId])
		})
	})

	// Behavior when optional config fields are omitted.
	describe("Default behavior", () => {
		it("should default to international when no zaiApiLine is specified", () => {
			const handlerDefault = new ZAiHandler({ zaiApiKey: "test-zai-api-key" })
			expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ baseURL: "https://api.z.ai/api/paas/v4" }))

			const model = handlerDefault.getModel()
			expect(model.id).toBe(internationalZAiDefaultModelId)
			expect(model.info).toEqual(internationalZAiModels[internationalZAiDefaultModelId])
		})

		it("should use 'not-provided' as default API key when none is specified", () => {
			new ZAiHandler({ zaiApiLine: "international" })
			expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ apiKey: "not-provided" }))
		})
	})

	// completePrompt (non-streaming) and createMessage (streaming) plumbing.
	describe("API Methods", () => {
		beforeEach(() => {
			handler = new ZAiHandler({ zaiApiKey: "test-zai-api-key", zaiApiLine: "international" })
		})

		it("completePrompt method should return text from Z AI API", async () => {
			const expectedResponse = "This is a test response from Z AI"
			mockCreate.mockResolvedValueOnce({ choices: [{ message: { content: expectedResponse } }] })
			const result = await handler.completePrompt("test prompt")
			expect(result).toBe(expectedResponse)
		})

		it("should handle errors in completePrompt", async () => {
			const errorMessage = "Z AI API error"
			mockCreate.mockRejectedValueOnce(new Error(errorMessage))
			// The handler is expected to wrap SDK errors with its own prefix.
			await expect(handler.completePrompt("test prompt")).rejects.toThrow(
				`Z AI completion error: ${errorMessage}`,
			)
		})

		it("createMessage should yield text content from stream", async () => {
			const testContent = "This is test content from Z AI stream"

			// Hand-rolled async iterator: one delta chunk, then done.
			mockCreate.mockImplementationOnce(() => {
				return {
					[Symbol.asyncIterator]: () => ({
						next: vitest
							.fn()
							.mockResolvedValueOnce({
								done: false,
								value: { choices: [{ delta: { content: testContent } }] },
							})
							.mockResolvedValueOnce({ done: true }),
					}),
				}
			})

			const stream = handler.createMessage("system prompt", [])
			const firstChunk = await stream.next()

			expect(firstChunk.done).toBe(false)
			expect(firstChunk.value).toEqual({ type: "text", text: testContent })
		})

		it("createMessage should yield usage data from stream", async () => {
			// Chunk carries only usage (empty delta) — handler should surface
			// it as a "usage" stream item.
			mockCreate.mockImplementationOnce(() => {
				return {
					[Symbol.asyncIterator]: () => ({
						next: vitest
							.fn()
							.mockResolvedValueOnce({
								done: false,
								value: {
									choices: [{ delta: {} }],
									usage: { prompt_tokens: 10, completion_tokens: 20 },
								},
							})
							.mockResolvedValueOnce({ done: true }),
					}),
				}
			})

			const stream = handler.createMessage("system prompt", [])
			const firstChunk = await stream.next()

			expect(firstChunk.done).toBe(false)
			expect(firstChunk.value).toEqual({ type: "usage", inputTokens: 10, outputTokens: 20 })
		})

		it("createMessage should pass correct parameters to Z AI client", async () => {
			const modelId: InternationalZAiModelId = "glm-4.5"
			const modelInfo = internationalZAiModels[modelId]
			const handlerWithModel = new ZAiHandler({
				apiModelId: modelId,
				zaiApiKey: "test-zai-api-key",
				zaiApiLine: "international",
			})

			// Empty stream — we only care about the outgoing request shape.
			mockCreate.mockImplementationOnce(() => {
				return {
					[Symbol.asyncIterator]: () => ({
						async next() {
							return { done: true }
						},
					}),
				}
			})

			const systemPrompt = "Test system prompt for Z AI"
			const messages: Anthropic.Messages.MessageParam[] = [{ role: "user", content: "Test message for Z AI" }]

			const messageGenerator = handlerWithModel.createMessage(systemPrompt, messages)
			await messageGenerator.next()

			expect(mockCreate).toHaveBeenCalledWith(
				expect.objectContaining({
					model: modelId,
					max_tokens: modelInfo.maxTokens,
					temperature: ZAI_DEFAULT_TEMPERATURE,
					messages: expect.arrayContaining([{ role: "system", content: systemPrompt }]),
					stream: true,
					stream_options: { include_usage: true },
				}),
			)
		})
	})
})
Oops, something went wrong.
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Uh oh!
There was an error while loading. Please reload this page.