Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 13 additions & 1 deletion src/api/providers/moonshot.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { moonshotModels, moonshotDefaultModelId } from "@roo-code/types"
import OpenAI from "openai"
import { moonshotModels, moonshotDefaultModelId, type ModelInfo } from "@roo-code/types"

import type { ApiHandlerOptions } from "../../shared/api"

Expand Down Expand Up @@ -36,4 +37,15 @@ export class MoonshotHandler extends OpenAiHandler {
cacheReadTokens: usage?.cached_tokens,
}
}

	/**
	 * Override of OpenAiHandler.addMaxTokensIfNeeded for Moonshot.
	 *
	 * Moonshot's endpoint uses the legacy `max_tokens` field instead of the
	 * newer `max_completion_tokens`, so this override always writes
	 * `max_tokens` onto the outgoing request options.
	 *
	 * @param requestOptions - The (streaming or non-streaming) chat completion
	 *   params object that is mutated in place.
	 * @param modelInfo - Model metadata supplying the fallback token limit.
	 */
	protected override addMaxTokensIfNeeded(
		requestOptions:
			| OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming
			| OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming,
		modelInfo: ModelInfo,
	): void {
		// Moonshot uses max_tokens instead of max_completion_tokens.
		// Prefer the user-configured limit, falling back to the model default.
		// NOTE(review): `||` also discards a configured value of 0 — presumably
		// intentional (0 is not a usable completion budget), but confirm.
		requestOptions.max_tokens = this.options.modelMaxTokens || modelInfo.maxTokens
	}
}
2 changes: 1 addition & 1 deletion src/api/providers/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -408,7 +408,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
* Note: max_tokens is deprecated in favor of max_completion_tokens as per OpenAI documentation
* O3 family models handle max_tokens separately in handleO3FamilyMessage
*/
private addMaxTokensIfNeeded(
protected addMaxTokensIfNeeded(
requestOptions:
| OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming
| OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming,
Expand Down
Loading