diff --git a/app/api/common.ts b/app/api/common.ts
index 17b5f916533..9149cc6c9e7 100644
--- a/app/api/common.ts
+++ b/app/api/common.ts
@@ -7,7 +7,6 @@ import {
   ServiceProvider,
 } from "../constant";
 import { isModelAvailableInServer } from "../utils/model";
-import { makeAzurePath } from "../azure";
 
 const serverConfig = getServerSideConfig();
 
diff --git a/app/azure.ts b/app/azure.ts
deleted file mode 100644
index 48406c55ba5..00000000000
--- a/app/azure.ts
+++ /dev/null
@@ -1,9 +0,0 @@
-export function makeAzurePath(path: string, apiVersion: string) {
-  // should omit /v1 prefix
-  path = path.replaceAll("v1/", "");
-
-  // should add api-key to query string
-  path += `${path.includes("?") ? "&" : "?"}api-version=${apiVersion}`;
-
-  return path;
-}
diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 25097e3baa6..56063d6f064 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -27,7 +27,6 @@ import {
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
-import { makeAzurePath } from "@/app/azure";
 import {
   getMessageTextContent,
   getMessageImages,
@@ -65,33 +64,31 @@ export class ChatGPTApi implements LLMApi {
 
     let baseUrl = "";
 
+    const isAzure = path.includes("deployments");
     if (accessStore.useCustomConfig) {
-      const isAzure = accessStore.provider === ServiceProvider.Azure;
-
       if (isAzure && !accessStore.isValidAzure()) {
         throw Error(
           "incomplete azure config, please check it in your settings page",
         );
       }
 
-      if (isAzure) {
-        path = makeAzurePath(path, accessStore.azureApiVersion);
-      }
-
       baseUrl = isAzure ? accessStore.azureUrl : accessStore.openaiUrl;
     }
 
     if (baseUrl.length === 0) {
       const isApp = !!getClientConfig()?.isApp;
-      baseUrl = isApp
-        ? DEFAULT_API_HOST + "/proxy" + ApiPath.OpenAI
-        : ApiPath.OpenAI;
+      const apiPath = isAzure ? ApiPath.Azure : ApiPath.OpenAI;
+      baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
     }
 
     if (baseUrl.endsWith("/")) {
       baseUrl = baseUrl.slice(0, baseUrl.length - 1);
     }
-    if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.OpenAI)) {
+    if (
+      !baseUrl.startsWith("http") &&
+      !isAzure &&
+      !baseUrl.startsWith(ApiPath.OpenAI)
+    ) {
       baseUrl = "https://" + baseUrl;
     }
 
@@ -100,15 +97,6 @@ export class ChatGPTApi implements LLMApi {
     return [baseUrl, path].join("/");
   }
 
-  getBaseUrl(apiPath: string) {
-    const isApp = !!getClientConfig()?.isApp;
-    let baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
-    if (baseUrl.endsWith("/")) {
-      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
-    }
-    return baseUrl + "/";
-  }
-
   extractMessage(res: any) {
     return res.choices?.at(0)?.message?.content ?? "";
   }
@@ -171,14 +159,14 @@ export class ChatGPTApi implements LLMApi {
           model.name == modelConfig.model &&
           model?.provider.providerName == ServiceProvider.Azure,
       );
-      chatPath =
-        this.getBaseUrl(ApiPath.Azure) +
+      chatPath = this.path(
         Azure.ChatPath(
           model?.displayName ?? model.name,
           useAccessStore.getState().azureApiVersion,
-        );
+        ),
+      );
     } else {
-      chatPath = this.getBaseUrl(ApiPath.OpenAI) + OpenaiPath.ChatPath;
+      chatPath = this.path(OpenaiPath.ChatPath);
    }
 
     const chatPayload = {
       method: "POST",