@@ -2,7 +2,7 @@ import { NextRequest, NextResponse } from "next/server";
import { getServerSideConfig } from "../config/server";
import { OPENAI_BASE_URL, ServiceProvider } from "../constant";
import { cloudflareAIGatewayUrl } from "../utils/cloudflare";
- import { getModelProvider, isModelAvailableInServer } from "../utils/model";
+ import { getModelProvider, isModelNotavailableInServer } from "../utils/model";

const serverConfig = getServerSideConfig();

@@ -118,15 +118,14 @@ export async function requestOpenai(req: NextRequest) {

  // not undefined and is false
  if (
-   isModelAvailableInServer(
+   isModelNotavailableInServer(
      serverConfig.customModels,
      jsonBody?.model as string,
-     ServiceProvider.OpenAI as string,
-   ) ||
-   isModelAvailableInServer(
-     serverConfig.customModels,
-     jsonBody?.model as string,
-     ServiceProvider.Azure as string,
+     [
+       ServiceProvider.OpenAI,
+       ServiceProvider.Azure,
+       jsonBody?.model as string, // support provider-unspecified model
+     ],
    )
  ) {
    return NextResponse.json(
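
For context, the new call shape above assumes a helper that checks the model against several provider candidates in a single call and returns true only when the model is unavailable for all of them, which is why the two OR-ed isModelAvailableInServer checks collapse into one isModelNotavailableInServer call. The TypeScript below is a minimal sketch of that assumed contract only, not the repository's actual app/utils/model.ts implementation; the customModels rule syntax shown in the comments is an assumption.

// Sketch only: an assumed contract for isModelNotavailableInServer, inferred from
// the call site in the diff above. Not the real implementation in app/utils/model.ts.
// Assumed customModels syntax: comma-separated rules such as "-all,+gpt-4o@openai".
function isModelNotavailableInServer(
  customModels: string,
  modelName: string,
  providerNames: string | string[],
): boolean {
  const providers = Array.isArray(providerNames)
    ? providerNames
    : [providerNames];
  const rules = customModels
    .split(",")
    .map((rule) => rule.trim())
    .filter(Boolean);

  // The model counts as available if at least one provider candidate
  // (including the raw, provider-unspecified model name) is not disabled.
  const availableSomewhere = providers.some((provider) => {
    const key = `${modelName}@${provider.toLowerCase()}`;
    let available = !rules.includes("-all"); // "-all" hides everything by default
    for (const rule of rules) {
      const enabled = !rule.startsWith("-");
      const name = rule.replace(/^[+-]/, "");
      if (name === "all" || name === modelName || name === key) {
        available = enabled;
      }
    }
    return available;
  });

  return !availableSomewhere;
}

// Example mirroring the call in the diff (ServiceProvider values assumed to be
// plain strings such as "OpenAI" / "Azure"):
// isModelNotavailableInServer(
//   serverConfig.customModels,
//   jsonBody?.model as string,
//   [ServiceProvider.OpenAI, ServiceProvider.Azure, jsonBody?.model as string],
// );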