diff --git a/src/llms/openai.ts b/src/llms/openai.ts
index 08038f8..9d661c2 100644
--- a/src/llms/openai.ts
+++ b/src/llms/openai.ts
@@ -15,6 +15,7 @@
 import { pipe, Observable, UnaryFunction } from "rxjs";
 import { filter, map, scan, takeWhile } from "rxjs/operators";
 import { LLM_PLUGIN_ID, LLM_PLUGIN_ROUTE } from "./constants";
+import { LLMAppHealthCheck } from "./types";
 
 const OPENAI_CHAT_COMPLETIONS_PATH = 'openai/v1/chat/completions';
 
@@ -329,11 +330,12 @@ let loggedWarning = false;
 
 /** Check if the OpenAI API is enabled via the LLM plugin. */
 export const enabled = async () => {
+  // Run a health check to see if the plugin is installed.
+  let response: LLMAppHealthCheck;
   try {
-    const settings = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/settings`, undefined, undefined, {
+    response = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/health`, undefined, undefined, {
       showSuccessAlert: false, showErrorAlert: false,
     });
-    return settings.enabled && (settings?.secureJsonFields?.openAIKey ?? false);
   } catch (e) {
     if (!loggedWarning) {
       logDebug(String(e));
@@ -342,4 +344,7 @@ export const enabled = async () => {
     }
     return false;
   }
+  // If the plugin is installed then check if it is configured.
+  const { details } = response;
+  return details?.openAIEnabled ?? false;
 }
diff --git a/src/llms/types.ts b/src/llms/types.ts
new file mode 100644
index 0000000..28663fd
--- /dev/null
+++ b/src/llms/types.ts
@@ -0,0 +1,6 @@
+export type LLMAppHealthCheck = {
+  details: {
+    openAIEnabled?: boolean;
+    vectorEnabled?: boolean;
+  };
+};
diff --git a/src/llms/vector.ts b/src/llms/vector.ts
index 876f928..31af5db 100644
--- a/src/llms/vector.ts
+++ b/src/llms/vector.ts
@@ -8,8 +8,9 @@
  * The {@link enabled} function can be used to check if the plugin is enabled and configured.
  */
 
-import { FetchError, getBackendSrv, logDebug } from "@grafana/runtime";
+import { getBackendSrv, logDebug } from "@grafana/runtime";
 import { LLM_PLUGIN_ROUTE } from "./constants";
+import { LLMAppHealthCheck } from "./types";
 
 interface SearchResultPayload extends Record<string, any> { }
 
@@ -74,10 +75,10 @@ let loggedWarning = false;
 
 /** Check if the vector API is enabled and configured via the LLM plugin. */
 export const enabled = async () => {
-  // Start by checking settings. If the plugin is not installed then this will fail.
-  let settings;
+  // Run a health check to see if the plugin is installed.
+  let response: LLMAppHealthCheck;
   try {
-    settings = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/settings`, undefined, undefined, {
+    response = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/health`, undefined, undefined, {
       showSuccessAlert: false, showErrorAlert: false,
     });
   } catch (e) {
@@ -88,50 +89,7 @@ export const enabled = async () => {
     }
     return false;
   }
-
-  // If the plugin is installed then check if it is enabled and configured.
-  const { enabled, jsonData } = settings;
-  const enabledInSettings: boolean = (
-    enabled &&
-    (jsonData.vector?.enabled ?? false) &&
-    (jsonData.vector?.embed?.type ?? false) &&
-    (jsonData.vector.store.type ?? false)
-  );
-  if (!enabledInSettings) {
-    logDebug('Vector service is not enabled, or not configured, in Grafana LLM plugin settings. Configure the grafana-llm-app plugin to enable vector search.');
-    return false;
-  }
-  // Finally, check if the vector search API is available.
-  try {
-    await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/resources/vector/search`, undefined, undefined, {
-      responseType: "text",
-      showSuccessAlert: false, showErrorAlert: false,
-    });
-    return true;
-  } catch (e: unknown) {
-    // If we've got this far then the call to /settings has succeeded, so the plugin is definitely
-    // installed. A 404 then means that the plugin version is not recent enough to have the
-    // vector search API.
-    if ((e as FetchError).status === 404) {
-      if (!loggedWarning) {
-        logDebug(String(e));
-        logDebug('Vector service is enabled, but the Grafana LLM plugin is not up-to-date. Update the grafana-llm-app plugin to enable vector search.');
-        loggedWarning = true;
-      }
-    }
-    // Backend sends 503 Service Unavailable if vector is not enabled or configured properly.
-    if ((e as FetchError).status === 503) {
-      if (!loggedWarning) {
-        logDebug(String(e));
-        logDebug('Vector service is not enabled, or not configured, in Grafana LLM plugin settings. Configure the grafana-llm-app plugin to enable vector search.');
-        loggedWarning = true;
-      }
-    }
-    // If the backend returns 405 Method Not Allowed then we've made it through to the
-    // handler, and it must be enabled.
-    if ((e as FetchError).status === 405) {
-      return true;
-    }
-    return false;
-  }
+  // If the plugin is installed then check if it is configured.
+  const { details } = response;
+  return details.vectorEnabled ?? false;
 };
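For reference, a minimal sketch of how a consumer might combine the two health-based `enabled` checks introduced above. The `llmFeatures` wrapper and the import paths are illustrative assumptions, not part of this change; only the exported `enabled` functions and the `LLMAppHealthCheck` shape come from the diff.

// Sketch only: combines the health-based checks from src/llms/openai.ts and
// src/llms/vector.ts. Import paths are assumed relative to a consuming module.
import { enabled as openAIEnabled } from "./llms/openai";
import { enabled as vectorEnabled } from "./llms/vector";

// Each check resolves to false when the LLM plugin is not installed (the /health
// request throws), and otherwise to the corresponding flag from the `details` object.
export async function llmFeatures(): Promise<{ openAI: boolean; vector: boolean }> {
  const [openAI, vector] = await Promise.all([openAIEnabled(), vectorEnabled()]);
  return { openAI, vector };
}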