Skip to content

Commit c402014

Browse files
committed
fix: remove nectarine experimental model
- Removed nectarine-alpha-new-reasoning-effort-2025-07-25 from openai.ts
- Removed nectarine handling from openai-native.ts (renamed to handleGpt5Message)
- Removed associated changeset file
- Keep GPT-5 models with developer role handling
1 parent 4a3a7ac commit c402014

File tree

3 files changed

+3
-26
lines changed

3 files changed

+3
-26
lines changed

.changeset/gpt5-support.md

Lines changed: 0 additions & 9 deletions
This file was deleted.

packages/types/src/providers/openai.ts

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -36,16 +36,6 @@ export const openAiNativeModels = {
3636
outputPrice: 0,
3737
cacheReadsPrice: 0,
3838
},
39-
"nectarine-alpha-new-reasoning-effort-2025-07-25": {
40-
maxTokens: 128000,
41-
contextWindow: 256000,
42-
supportsImages: true,
43-
supportsPromptCache: true,
44-
supportsReasoningEffort: true,
45-
inputPrice: 0,
46-
outputPrice: 0,
47-
cacheReadsPrice: 0,
48-
},
4939
"gpt-4.1": {
5040
maxTokens: 32_768,
5141
contextWindow: 1_047_576,

src/api/providers/openai-native.ts

Lines changed: 3 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -53,8 +53,8 @@ export class OpenAiNativeHandler extends BaseProvider implements SingleCompletio
5353
yield* this.handleReasonerMessage(model, id, systemPrompt, messages)
5454
} else if (model.id.startsWith("o1")) {
5555
yield* this.handleO1FamilyMessage(model, systemPrompt, messages)
56-
} else if (this.isNectarineModel(model.id) || this.isGpt5Model(model.id)) {
57-
yield* this.handleNectarineMessage(model, systemPrompt, messages)
56+
} else if (this.isGpt5Model(model.id)) {
57+
yield* this.handleGpt5Message(model, systemPrompt, messages)
5858
} else {
5959
yield* this.handleDefaultModelMessage(model, systemPrompt, messages)
6060
}
@@ -131,7 +131,7 @@ export class OpenAiNativeHandler extends BaseProvider implements SingleCompletio
131131
yield* this.handleStreamResponse(stream, model)
132132
}
133133

134-
private async *handleNectarineMessage(
134+
private async *handleGpt5Message(
135135
model: OpenAiNativeModel,
136136
systemPrompt: string,
137137
messages: Anthropic.Messages.MessageParam[],
@@ -150,10 +150,6 @@ export class OpenAiNativeHandler extends BaseProvider implements SingleCompletio
150150
yield* this.handleStreamResponse(stream, model)
151151
}
152152

153-
private isNectarineModel(modelId: string): boolean {
154-
return modelId.includes("nectarine")
155-
}
156-
157153
private isGpt5Model(modelId: string): boolean {
158154
return modelId.startsWith("gpt-5")
159155
}

0 commit comments

Comments (0)