diff --git a/packages/kbn-lock-manager/src/lock_manager_client.ts b/packages/kbn-lock-manager/src/lock_manager_client.ts index ebc0a8f2c7e7f..b2fcc4d6a4cd1 100644 --- a/packages/kbn-lock-manager/src/lock_manager_client.ts +++ b/packages/kbn-lock-manager/src/lock_manager_client.ts @@ -256,6 +256,19 @@ export class LockManager { } } +export async function getLock({ + esClient, + logger, + lockId, +}: { + esClient: ElasticsearchClient; + logger: Logger; + lockId: LockId; +}): Promise { + const lockManager = new LockManager(lockId, esClient, logger); + return lockManager.get(); +} + export async function withLock( { esClient, @@ -280,9 +293,7 @@ export async function withLock( // extend the ttl periodically const extendInterval = Math.floor(ttl / 4); - logger.debug( - `Lock "${lockId}" acquired. Extending TTL every ${prettyMilliseconds(extendInterval)}` - ); + logger.debug(`Extending TTL for lock "${lockId}" every ${prettyMilliseconds(extendInterval)}`); let extendTTlPromise = Promise.resolve(true); const intervalId = setInterval(() => { diff --git a/packages/kbn-lock-manager/src/lock_manager_service.ts b/packages/kbn-lock-manager/src/lock_manager_service.ts index b7c03a43fd47c..f28061fb4ff7b 100644 --- a/packages/kbn-lock-manager/src/lock_manager_service.ts +++ b/packages/kbn-lock-manager/src/lock_manager_service.ts @@ -8,7 +8,7 @@ */ import { CoreSetup, Logger } from '@kbn/core/server'; -import { LockId, withLock } from './lock_manager_client'; +import { LockId, withLock, getLock } from './lock_manager_client'; export class LockManagerService { constructor(private readonly coreSetup: CoreSetup, private readonly logger: Logger) {} @@ -35,8 +35,16 @@ export class LockManagerService { ) { const [coreStart] = await this.coreSetup.getStartServices(); const esClient = coreStart.elasticsearch.client.asInternalUser; - const logger = this.logger.get('LockManager'); + const logger = this.logger.get('lock-manager'); return withLock({ esClient, logger, lockId, metadata }, 
callback); } + + async getLock(lockId: LockId) { + const [coreStart] = await this.coreSetup.getStartServices(); + const esClient = coreStart.elasticsearch.client.asInternalUser; + const logger = this.logger.get('lock-manager'); + + return getLock({ esClient, logger, lockId }); + } } diff --git a/src/dev/run_check_file_casing.ts b/src/dev/run_check_file_casing.ts index c0cdad2c60400..e9dfc28423213 100644 --- a/src/dev/run_check_file_casing.ts +++ b/src/dev/run_check_file_casing.ts @@ -34,6 +34,9 @@ run(async ({ log }) => { 'src/platform/**', 'x-pack/platform/**', 'x-pack/solutions/**', + + // ignore autogenerated snapshots + 'x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots', ], }); diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/knowledge_base_callout.tsx b/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/knowledge_base_callout.tsx deleted file mode 100644 index abb296713b2d2..0000000000000 --- a/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/knowledge_base_callout.tsx +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { - EuiFlexGroup, - EuiFlexItem, - EuiIcon, - EuiLink, - EuiLoadingSpinner, - EuiPanel, - EuiSpacer, - EuiText, -} from '@elastic/eui'; -import { i18n } from '@kbn/i18n'; -import { UseKnowledgeBaseResult } from '../hooks/use_knowledge_base'; - -export function KnowledgeBaseCallout({ knowledgeBase }: { knowledgeBase: UseKnowledgeBaseResult }) { - let content: React.ReactNode; - - let color: 'primary' | 'danger' | 'plain' = 'primary'; - - if (knowledgeBase.status.loading) { - content = ( - - - - - - - {i18n.translate('xpack.aiAssistant.checkingKbAvailability', { - defaultMessage: 'Checking availability of knowledge base', - })} - - - - ); - } else if (knowledgeBase.status.error) { - color = 'danger'; - content = ( - - {i18n.translate('xpack.aiAssistant.failedToGetStatus', { - defaultMessage: 'Failed to get model status.', - })} - - ); - } else if (knowledgeBase.status.value?.ready) { - color = 'plain'; - content = ( - - {' '} - {i18n.translate('xpack.aiAssistant.poweredByModel', { - defaultMessage: 'Powered by {model}', - values: { - model: 'ELSER', - }, - })} - - ); - } else if (knowledgeBase.isInstalling) { - color = 'primary'; - content = ( - - - - - - - {i18n.translate('xpack.aiAssistant.installingKb', { - defaultMessage: 'Setting up the knowledge base', - })} - - - - ); - } else if (knowledgeBase.installError) { - color = 'danger'; - content = ( - - {i18n.translate('xpack.aiAssistant.failedToSetupKnowledgeBase', { - defaultMessage: 'Failed to set up knowledge base.', - })} - - ); - } else if (!knowledgeBase.status.value?.ready && !knowledgeBase.status.error) { - content = ( - { - knowledgeBase.install(); - }} - > - - {' '} - {i18n.translate('xpack.aiAssistant.setupKb', { - defaultMessage: 'Improve your experience by setting up the knowledge base.', - })} - - - ); - } - - return ( - <> - {knowledgeBase.status.value?.ready ? 
null : } - - {content} - - - ); -} diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message.tsx b/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message.tsx index a4c92131a38a5..8c4eb5bcfcf1b 100644 --- a/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message.tsx +++ b/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message.tsx @@ -16,7 +16,7 @@ import type { UseKnowledgeBaseResult } from '../hooks/use_knowledge_base'; import type { UseGenAIConnectorsResult } from '../hooks/use_genai_connectors'; import { Disclaimer } from './disclaimer'; import { WelcomeMessageConnectors } from './welcome_message_connectors'; -import { WelcomeMessageKnowledgeBase } from './welcome_message_knowledge_base'; +import { WelcomeMessageKnowledgeBase } from '../knowledge_base/welcome_message_knowledge_base'; import { StarterPrompts } from './starter_prompts'; import { useKibana } from '../hooks/use_kibana'; @@ -60,10 +60,6 @@ export function WelcomeMessage({ if (isSupportedConnectorType(createdConnector.actionTypeId)) { connectors.reloadConnectors(); } - - if (!knowledgeBase.status.value || knowledgeBase.status.value?.ready === false) { - knowledgeBase.install(); - } }; const ConnectorFlyout = useMemo( diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message_connectors.tsx b/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message_connectors.tsx index af358c49f2c51..56bd4b2881877 100644 --- a/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message_connectors.tsx +++ b/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message_connectors.tsx @@ -21,7 +21,7 @@ import { euiThemeVars } from '@kbn/ui-theme'; import { isHttpFetchError } from '@kbn/core-http-browser'; import type { UseGenAIConnectorsResult } from '../hooks/use_genai_connectors'; -const fadeInAnimation = keyframes` +export const fadeInAnimation = keyframes` from { 
opacity: 0; } diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message_knowledge_base.test.tsx b/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message_knowledge_base.test.tsx deleted file mode 100644 index db29938ee6262..0000000000000 --- a/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message_knowledge_base.test.tsx +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import React from 'react'; -import { act, render, screen } from '@testing-library/react'; - -import { WelcomeMessageKnowledgeBase } from './welcome_message_knowledge_base'; -import type { UseKnowledgeBaseResult } from '../hooks/use_knowledge_base'; - -describe('WelcomeMessageKnowledgeBase', () => { - afterEach(() => { - jest.clearAllMocks(); - }); - - function createMockKnowledgeBase( - partial: Partial = {} - ): UseKnowledgeBaseResult { - return { - isInstalling: partial.isInstalling ?? false, - install: partial.install ?? jest.fn(), - installError: partial.installError, - status: partial.status ?? 
{ - value: { - ready: false, - enabled: true, - }, - loading: false, - error: undefined, - refresh: jest.fn(), - }, - }; - } - - function renderComponent(kb: UseKnowledgeBaseResult) { - return render(); - } - - it('renders install message if isInstalling', () => { - const kb = createMockKnowledgeBase({ isInstalling: true }); - renderComponent(kb); - - expect(screen.getByText(/We are setting up your knowledge base/i)).toBeInTheDocument(); - expect(screen.getByText(/Setting up Knowledge base/i)).toBeInTheDocument(); - }); - - it('renders the success banner after a transition from installing to not installing with no error', async () => { - // 1) Start in an installing state - let kb = createMockKnowledgeBase({ - isInstalling: true, - }); - const { rerender } = renderComponent(kb); - - // Should not see success banner initially - expect(screen.queryByText(/Knowledge base successfully installed/i)).toBeNull(); - - // 2) Transition to isInstalling = false, no installError - kb = { - ...kb, - isInstalling: false, - status: { - ...kb.status, - value: { - ...kb.status.value, - ready: true, - enabled: true, - }, - }, - }; - - await act(async () => { - rerender(); - }); - - // Now we should see success banner - expect(screen.getByText(/Knowledge base successfully installed/i)).toBeInTheDocument(); - }); - - it('renders "not set up" if installError is present', () => { - const kb = createMockKnowledgeBase({ - installError: new Error('model deployment failed'), - }); - renderComponent(kb); - - expect(screen.getByText(/Your Knowledge base hasn't been set up/i)).toBeInTheDocument(); - expect(screen.getByText(/Install Knowledge base/i)).toBeInTheDocument(); - // Because we have an installError, we also see "Inspect issues" button - expect(screen.getByText(/Inspect issues/i)).toBeInTheDocument(); - }); - - it('renders "not set up" if server returns errorMessage (no endpoint exists) but user hasnt started installing', () => { - // this happens when no endpoint exists because user 
has never installed - // which can happen for on prem users with preconfigured connector where /setup is not - // automatically called - const kb = createMockKnowledgeBase({ - isInstalling: false, - installError: undefined, - status: { - value: { - ready: false, - enabled: true, - errorMessage: 'no endpoint', - }, - loading: false, - refresh: jest.fn(), - }, - }); - renderComponent(kb); - - expect(screen.getByText(/Your Knowledge base hasn't been set up/i)).toBeInTheDocument(); - expect(screen.getByText(/Install Knowledge base/i)).toBeInTheDocument(); - expect(screen.queryByText(/Inspect issues/i)).toBeNull(); - }); - - it('renders "not set up" if model is not ready (but no errorMessage because endpoint exists)', () => { - // This could happen if the user manually stopped the model in ML, - // so we have no install error, but ready = false - const kb = createMockKnowledgeBase({ - isInstalling: false, - status: { - value: { - endpoint: {}, - ready: false, - enabled: true, - }, - loading: false, - error: undefined, - refresh: jest.fn(), - }, - }); - renderComponent(kb); - - expect(screen.getByText(/Your Knowledge base hasn't been set up/i)).toBeInTheDocument(); - expect(screen.getByText(/Install Knowledge base/i)).toBeInTheDocument(); - expect(screen.getByText(/Inspect issues/i)).toBeInTheDocument(); - }); - - it('renders nothing if the knowledge base is already installed', () => { - const kb = createMockKnowledgeBase({ - status: { - value: { - ready: true, - enabled: true, - errorMessage: undefined, - }, - loading: false, - error: undefined, - refresh: jest.fn(), - }, - }); - renderComponent(kb); - - expect(screen.queryByText(/We are setting up your knowledge base/i)).toBeNull(); - expect(screen.queryByText(/Your Knowledge base hasn't been set up/i)).toBeNull(); - expect(screen.queryByText(/Knowledge base successfully installed/i)).toBeNull(); - }); -}); diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message_knowledge_base.tsx 
b/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message_knowledge_base.tsx deleted file mode 100644 index 097a0ba15db7b..0000000000000 --- a/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message_knowledge_base.tsx +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import React, { useEffect, useState } from 'react'; -import { i18n } from '@kbn/i18n'; -import { - EuiButton, - EuiButtonEmpty, - EuiFlexGroup, - EuiFlexItem, - EuiIcon, - EuiPopover, - EuiSpacer, - EuiText, -} from '@elastic/eui'; -import usePrevious from 'react-use/lib/usePrevious'; - -import { WelcomeMessageKnowledgeBaseSetupErrorPanel } from './welcome_message_knowledge_base_setup_error_panel'; -import type { UseKnowledgeBaseResult } from '../hooks/use_knowledge_base'; - -export function WelcomeMessageKnowledgeBase({ - knowledgeBase, -}: { - knowledgeBase: UseKnowledgeBaseResult; -}) { - const prevIsInstalling = usePrevious(knowledgeBase.isInstalling); - const [showSuccessBanner, setShowSuccessBanner] = useState(false); - // track whether the "inspect issues" popover is open - const [isPopoverOpen, setIsPopoverOpen] = useState(false); - - useEffect(() => { - if ( - prevIsInstalling === true && - knowledgeBase.isInstalling === false && - !knowledgeBase.installError - ) { - setShowSuccessBanner(true); - } - }, [knowledgeBase.isInstalling, knowledgeBase.installError, prevIsInstalling]); - - const handleInstall = async () => { - setIsPopoverOpen(false); - await knowledgeBase.install(); - }; - - // If we are installing at any step (POST /setup + model deployment) - if (knowledgeBase.isInstalling) { - return ( - <> - - {i18n.translate('xpack.aiAssistant.welcomeMessage.weAreSettingUpTextLabel', { - defaultMessage: - 'We are 
setting up your knowledge base. This may take a few minutes. You can continue to use the Assistant while this process is underway.', - })} - - - - - {}} - > - {i18n.translate('xpack.aiAssistant.welcomeMessage.div.settingUpKnowledgeBaseLabel', { - defaultMessage: 'Setting up Knowledge base', - })} - - - ); - // not installing and install error or the endpoint doesn't exist or model not ready - } else if ( - knowledgeBase.installError || - knowledgeBase.status.value?.errorMessage || - !knowledgeBase.status.value?.ready - ) { - return ( - <> - - {i18n.translate( - 'xpack.aiAssistant.welcomeMessageKnowledgeBase.yourKnowledgeBaseIsNotSetUpCorrectlyLabel', - { defaultMessage: `Your Knowledge base hasn't been set up.` } - )} - - - - - - -
- - {i18n.translate('xpack.aiAssistant.welcomeMessage.retryButtonLabel', { - defaultMessage: 'Install Knowledge base', - })} - -
-
- { - // only show the "inspect issues" button if there is an install error - // or the model is not ready but endpoint exists - (knowledgeBase.installError || - (!knowledgeBase.status.value?.ready && knowledgeBase.status.value?.endpoint)) && ( - - setIsPopoverOpen(!isPopoverOpen)} - > - {i18n.translate( - 'xpack.aiAssistant.welcomeMessage.inspectErrorsButtonEmptyLabel', - { - defaultMessage: 'Inspect issues', - } - )} - - } - isOpen={isPopoverOpen} - panelPaddingSize="none" - closePopover={() => setIsPopoverOpen(false)} - > - - - - ) - } -
- - - - ); - } - - // successfull installation - if (showSuccessBanner) { - return ( -
- - - - - - - {i18n.translate( - 'xpack.aiAssistant.welcomeMessage.knowledgeBaseSuccessfullyInstalledLabel', - { defaultMessage: 'Knowledge base successfully installed' } - )} - - - -
- ); - } - - return null; -} diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/__storybook_mocks__/use_knowledge_base.ts b/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/__storybook_mocks__/use_knowledge_base.ts index 8859cc716cc52..d91129b33fb9d 100644 --- a/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/__storybook_mocks__/use_knowledge_base.ts +++ b/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/__storybook_mocks__/use_knowledge_base.ts @@ -5,20 +5,27 @@ * 2.0. */ +import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/common'; import { UseKnowledgeBaseResult } from '../use_knowledge_base'; export function useKnowledgeBase(): UseKnowledgeBaseResult { return { - install: async () => {}, isInstalling: false, + isPolling: false, + install: async () => {}, status: { loading: false, refresh: () => {}, error: undefined, value: { - ready: true, + kbState: KnowledgeBaseState.NOT_INSTALLED, enabled: true, + concreteWriteIndex: undefined, + currentInferenceId: undefined, + isReIndexing: false, }, }, + warmupModel: async () => {}, + isWarmingUpModel: false, }; } diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/use_inference_endpoints.test.ts b/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/use_inference_endpoints.test.ts new file mode 100644 index 0000000000000..44e459ff5d5a5 --- /dev/null +++ b/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/use_inference_endpoints.test.ts @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { renderHook, act } from '@testing-library/react'; +import { useInferenceEndpoints } from './use_inference_endpoints'; +import { useAIAssistantAppService } from './use_ai_assistant_app_service'; + +jest.mock('./use_ai_assistant_app_service'); + +describe('useInferenceEndpoints', () => { + const mockCallApi = jest.fn(); + + beforeEach(() => { + jest.clearAllMocks(); + + (useAIAssistantAppService as jest.Mock).mockReturnValue({ + callApi: mockCallApi, + }); + }); + + it('fetches inference endpoints successfully on mount', async () => { + const mockResponse = { + endpoints: [ + { id: '1', name: 'Endpoint 1' }, + { id: '2', name: 'Endpoint 2' }, + ], + }; + + mockCallApi.mockResolvedValueOnce(mockResponse); + + const { result } = renderHook(() => useInferenceEndpoints()); + + await act(async () => { + await Promise.resolve(); + }); + + expect(mockCallApi).toHaveBeenCalledWith( + 'GET /internal/observability_ai_assistant/kb/inference_endpoints', + { + signal: expect.any(AbortSignal), + } + ); + + expect(result.current.inferenceEndpoints).toEqual(mockResponse.endpoints); + expect(result.current.isLoading).toBe(false); + expect(result.current.error).toBeUndefined(); + }); + + it('sets an error state on API errors', async () => { + const error = new Error('Something went wrong'); + mockCallApi.mockRejectedValueOnce(error); + + const { result } = renderHook(() => useInferenceEndpoints()); + + await act(async () => { + await Promise.resolve(); + }); + + expect(result.current.inferenceEndpoints).toEqual([]); + expect(result.current.isLoading).toBe(false); + expect(result.current.error).toEqual(error); + }); + + it('ignores AbortError and does not set error state', async () => { + const abortError = new DOMException('Aborted', 'AbortError'); + mockCallApi.mockRejectedValueOnce(abortError); + + const { result } = renderHook(() => useInferenceEndpoints()); + + await act(async () => { + await Promise.resolve(); + }); + + 
expect(result.current.inferenceEndpoints).toEqual([]); + expect(result.current.isLoading).toBe(false); + expect(result.current.error).toBeUndefined(); + }); +}); diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/use_inference_endpoints.ts b/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/use_inference_endpoints.ts new file mode 100644 index 0000000000000..94d33bb08ef7f --- /dev/null +++ b/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/use_inference_endpoints.ts @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { useEffect, useState, useMemo, useCallback } from 'react'; +import type { APIReturnType } from '@kbn/observability-ai-assistant-plugin/public'; +import { useAIAssistantAppService } from './use_ai_assistant_app_service'; + +export function useInferenceEndpoints() { + const service = useAIAssistantAppService(); + + const [inferenceEndpoints, setInferenceEndpoints] = useState< + APIReturnType<'GET /internal/observability_ai_assistant/kb/inference_endpoints'>['endpoints'] + >([]); + + const [isLoading, setIsLoading] = useState(true); + const [error, setError] = useState(undefined); + + const controller = useMemo(() => new AbortController(), []); + + const fetchInferenceEndpoints = useCallback(async () => { + setIsLoading(true); + + try { + const res = await service.callApi( + 'GET /internal/observability_ai_assistant/kb/inference_endpoints', + { + signal: controller.signal, + } + ); + + setInferenceEndpoints(res.endpoints); + setError(undefined); + } catch (err) { + if (err.name !== 'AbortError') { + setError(err as Error); + setInferenceEndpoints([]); + } + } finally { + setIsLoading(false); + } + }, [controller, service]); + + useEffect(() => { + fetchInferenceEndpoints(); + + return 
() => { + controller.abort(); + }; + }, [controller, fetchInferenceEndpoints]); + + return { inferenceEndpoints, isLoading, error }; +} diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/use_knowledge_base.test.tsx b/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/use_knowledge_base.test.tsx index 6a1b484996d14..a281738d093f4 100644 --- a/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/use_knowledge_base.test.tsx +++ b/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/use_knowledge_base.test.tsx @@ -12,6 +12,9 @@ import { useAIAssistantAppService } from './use_ai_assistant_app_service'; jest.mock('./use_kibana'); jest.mock('./use_ai_assistant_app_service'); +jest.mock('p-retry', () => { + return (fn: () => Promise) => fn(); +}); describe('useKnowledgeBase', () => { const mockCallApi = jest.fn(); @@ -47,8 +50,6 @@ describe('useKnowledgeBase', () => { const { result } = renderHook(() => useKnowledgeBase()); expect(result.current.status.value).toBeUndefined(); - expect(result.current.isInstalling).toBe(false); - expect(result.current.installError).toBeUndefined(); }); it('calls GET /status once on mount', async () => { @@ -65,27 +66,47 @@ describe('useKnowledgeBase', () => { }); }); - it('install() handles error and sets installError state', async () => { - const error = new Error('Setup failed'); + it('calls install function', async () => { + const successResponse = { ready: true }; mockCallApi .mockResolvedValueOnce({ ready: false }) // Initial GET /status - .mockRejectedValueOnce(error); // POST /setup fails + .mockResolvedValueOnce(successResponse); // POST /setup succeeds const { result } = renderHook(() => useKnowledgeBase()); - // Trigger install + // Trigger setup act(() => { - result.current.install(); + result.current.install('.elser-2-elasticsearch'); }); - // Wait for error state + // Verify that the install was called await waitFor(() => { - expect(result.current.isInstalling).toBe(false); - 
expect(result.current.installError).toBe(error); + expect(mockCallApi).toHaveBeenCalledWith( + 'POST /internal/observability_ai_assistant/kb/setup', + { + params: { + query: { + inference_id: '.elser-2-elasticsearch', + }, + }, + signal: null, + } + ); + }); + }); + + it('shows an error toast on install failure', async () => { + const error = new Error('setup failed'); + + mockCallApi.mockResolvedValueOnce({ kbState: 'NOT_INSTALLED' }).mockRejectedValueOnce(error); + + const { result } = renderHook(() => useKnowledgeBase()); + + await act(async () => { + await result.current.install('failing-id'); }); - // Verify the error toast was shown - expect(mockAddError).toHaveBeenCalledWith(error, { + expect(mockAddError).toHaveBeenCalledWith(expect.any(Error), { title: expect.any(String), }); }); diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/use_knowledge_base.tsx b/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/use_knowledge_base.tsx index 044dd13efa6d2..49a384e117b25 100644 --- a/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/use_knowledge_base.tsx +++ b/x-pack/platform/packages/shared/kbn-ai-assistant/src/hooks/use_knowledge_base.tsx @@ -6,10 +6,12 @@ */ import { i18n } from '@kbn/i18n'; import { useCallback, useEffect, useState } from 'react'; +import pRetry from 'p-retry'; import { type AbortableAsyncState, useAbortableAsync, APIReturnType, + KnowledgeBaseState, } from '@kbn/observability-ai-assistant-plugin/public'; import { useKibana } from './use_kibana'; import { useAIAssistantAppService } from './use_ai_assistant_app_service'; @@ -17,8 +19,10 @@ import { useAIAssistantAppService } from './use_ai_assistant_app_service'; export interface UseKnowledgeBaseResult { status: AbortableAsyncState>; isInstalling: boolean; - installError?: Error; - install: () => Promise; + isPolling: boolean; + install: (inferenceId: string) => Promise; + warmupModel: (inferenceId: string) => Promise; + isWarmingUpModel: boolean; } export 
function useKnowledgeBase(): UseKnowledgeBaseResult { @@ -33,103 +37,115 @@ export function useKnowledgeBase(): UseKnowledgeBaseResult { ); const [isInstalling, setIsInstalling] = useState(false); + const [isWarmingUpModel, setIsWarmingUpModel] = useState(false); - const [installError, setInstallError] = useState(); - const [isPollingForDeployment, setIsPollingForDeployment] = useState(false); + // poll for status when installing, until install is complete and the KB is ready + const isPolling = + (isInstalling || isWarmingUpModel) && statusRequest.value?.kbState !== KnowledgeBaseState.READY; - const install = useCallback(async () => { - setIsInstalling(true); - setIsPollingForDeployment(false); - setInstallError(undefined); - - let attempts = 0; - const MAX_ATTEMPTS = 5; - - try { - // install - await retrySetupIfError(); - - if (ml.mlApi?.savedObjects.syncSavedObjects) { - await ml.mlApi.savedObjects.syncSavedObjects(); - } - - // do one refresh to get an initial status - await statusRequest.refresh(); - - // start polling for readiness - setIsPollingForDeployment(true); - } catch (e) { - setInstallError(e); - notifications!.toasts.addError(e, { - title: i18n.translate('xpack.aiAssistant.errorSettingUpInferenceEndpoint', { - defaultMessage: 'Could not create inference endpoint', - }), - }); + useEffect(() => { + // toggle installing state to false once KB is ready + if (isInstalling && statusRequest.value?.kbState === KnowledgeBaseState.READY) { setIsInstalling(false); } + }, [isInstalling, statusRequest]); - async function retrySetupIfError() { - while (true) { - try { - await service.callApi('POST /internal/observability_ai_assistant/kb/setup', { - signal: null, - }); - break; - } catch (error) { - if ( - (error.body?.statusCode === 503 || error.body?.statusCode === 504) && - attempts < MAX_ATTEMPTS - ) { - attempts++; - continue; + useEffect(() => { + // toggle warming up state to false once KB is ready + if (isWarmingUpModel && statusRequest.value?.kbState 
=== KnowledgeBaseState.READY) { + setIsWarmingUpModel(false); + } + }, [isWarmingUpModel, statusRequest]); + + const install = useCallback( + async (inferenceId: string) => { + setIsInstalling(true); + try { + // Retry the setup with a maximum of 5 attempts + await pRetry( + async () => { + await service.callApi('POST /internal/observability_ai_assistant/kb/setup', { + params: { + query: { + inference_id: inferenceId, + }, + }, + signal: null, + }); + }, + { + retries: 5, } - throw error; + ); + if (ml.mlApi?.savedObjects.syncSavedObjects) { + await ml.mlApi.savedObjects.syncSavedObjects(); } + + // Refresh status after installation + statusRequest.refresh(); + } catch (error) { + notifications!.toasts.addError(error, { + title: i18n.translate('xpack.aiAssistant.errorSettingUpKnowledgeBase', { + defaultMessage: 'Could not setup knowledge base', + }), + }); } - } - }, [ml, service, notifications, statusRequest]); + }, + [ml, service, notifications, statusRequest] + ); + + const warmupModel = useCallback( + async (inferenceId: string) => { + setIsWarmingUpModel(true); + try { + await service.callApi('POST /internal/observability_ai_assistant/kb/warmup_model', { + params: { + query: { + inference_id: inferenceId, + }, + }, + signal: null, + }); + + // Refresh status after warming up model + statusRequest.refresh(); + } catch (error) { + notifications!.toasts.addError(error, { + title: i18n.translate('xpack.aiAssistant.errorWarmingupModel', { + defaultMessage: 'Could not warm up knowledge base model', + }), + }); + } + }, + [service, notifications, statusRequest] + ); - // poll the status if isPollingForDeployment === true - // stop when ready === true or some error + // poll the status if isPolling useEffect(() => { - if (!isPollingForDeployment) { + if (!isPolling) { return; } - const interval = setInterval(async () => { - // re-fetch /status - await statusRequest.refresh(); - const { value: currentStatus } = statusRequest; - - // check if the model is now ready - if 
(currentStatus?.ready) { - // done installing - setIsInstalling(false); - setIsPollingForDeployment(false); - clearInterval(interval); - return; - } + const interval = setInterval(statusRequest.refresh, 5000); - // if "deployment failed" state - if (currentStatus?.model_stats?.deployment_state === 'failed') { - setInstallError(new Error('model deployment failed')); - setIsInstalling(false); - setIsPollingForDeployment(false); - clearInterval(interval); - return; - } - }, 5000); + if (statusRequest.value?.kbState === KnowledgeBaseState.READY) { + // done installing + clearInterval(interval); + return; + } // cleanup the interval if unmount return () => { clearInterval(interval); }; - }, [isPollingForDeployment, statusRequest]); + }, [statusRequest, isPolling]); return { status: statusRequest, install, isInstalling, - installError, + isPolling, + warmupModel, + isWarmingUpModel, }; } diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/knowledge_base_installation_status_panel.tsx b/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/knowledge_base_installation_status_panel.tsx new file mode 100644 index 0000000000000..b7cd819600903 --- /dev/null +++ b/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/knowledge_base_installation_status_panel.tsx @@ -0,0 +1,166 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React, { useState } from 'react'; +import { + EuiButton, + EuiButtonEmpty, + EuiFlexGroup, + EuiFlexItem, + EuiPopover, + EuiSpacer, + EuiText, +} from '@elastic/eui'; +import { i18n } from '@kbn/i18n'; +import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/public'; +import { UseKnowledgeBaseResult } from '../hooks'; +import { WelcomeMessageKnowledgeBaseSetupErrorPanel } from './welcome_message_knowledge_base_setup_error_panel'; +import { SelectModelAndInstallKnowledgeBase } from './select_model_and_install_knowledge_base'; +import { SettingUpKnowledgeBase } from './setting_up_knowledge_base'; + +const WarmUpModel = ({ + knowledgeBase, + pendingDeployment = false, +}: { + knowledgeBase: UseKnowledgeBaseResult; + pendingDeployment?: boolean; +}) => { + const currentInferenceId = knowledgeBase.status.value?.endpoint?.inference_id; + + const handleWarmup = () => { + knowledgeBase.warmupModel(currentInferenceId!); + }; + + const label = ( + + {i18n.translate( + knowledgeBase.isWarmingUpModel + ? 'xpack.aiAssistant.welcomeMessage.redeployingKnowledgeBaseTextLabel' + : pendingDeployment + ? 'xpack.aiAssistant.welcomeMessage.knowledgeBaseStoppedTextLabel' + : 'xpack.aiAssistant.welcomeMessage.knowledgeBasePausedTextLabel', + { + defaultMessage: knowledgeBase.isWarmingUpModel + ? 'Re-deploying knowledge base model...' + : pendingDeployment + ? 
'Your knowledge base model has been stopped' + : 'Knowledge base model paused due to inactivity.', + } + )} + + ); + + return ( + <> + {label} + + + + + {i18n.translate('xpack.aiAssistant.knowledgeBase.wakeUpKnowledgeBaseModel', { + defaultMessage: 'Re-deploy Model', + })} + + + + + ); +}; + +const InspectKnowledgeBasePopover = ({ + knowledgeBase, +}: { + knowledgeBase: UseKnowledgeBaseResult; +}) => { + const [isPopoverOpen, setIsPopoverOpen] = useState(false); + + const handleInstall = async (inferenceId: string) => { + setIsPopoverOpen(false); + await knowledgeBase.install(inferenceId); + }; + + return knowledgeBase.status.value?.modelStats ? ( + + setIsPopoverOpen(!isPopoverOpen)} + > + {i18n.translate('xpack.aiAssistant.welcomeMessage.inspectErrorsButtonEmptyLabel', { + defaultMessage: 'Inspect', + })} + + } + isOpen={isPopoverOpen} + panelPaddingSize="none" + closePopover={() => setIsPopoverOpen(false)} + > + + + + ) : null; +}; + +export const KnowledgeBaseInstallationStatusPanel = ({ + knowledgeBase, +}: { + knowledgeBase: UseKnowledgeBaseResult; +}) => { + switch (knowledgeBase.status.value?.kbState) { + case KnowledgeBaseState.NOT_INSTALLED: + return ( + <> + + + + + + ); + case KnowledgeBaseState.MODEL_PENDING_DEPLOYMENT: + return ; + case KnowledgeBaseState.DEPLOYING_MODEL: + return ( + <> + + + + ); + case KnowledgeBaseState.MODEL_PENDING_ALLOCATION: + return ; + case KnowledgeBaseState.ERROR: + return ( + <> + + {i18n.translate('xpack.aiAssistant.welcomeMessage.SettingUpFailTextLabel', { + defaultMessage: `Knowledge Base setup failed. 
Check 'Inspect' for details.`, + })} + + + + ); + default: + return null; + } +}; diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/select_model_and_install_knowledge_base.test.tsx b/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/select_model_and_install_knowledge_base.test.tsx new file mode 100644 index 0000000000000..33df67f46e369 --- /dev/null +++ b/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/select_model_and_install_knowledge_base.test.tsx @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import React from 'react'; +import { render, screen, fireEvent, waitFor } from '@testing-library/react'; +import type { ModelOptionsData } from '../utils/get_model_options_for_inference_endpoints'; +import { SelectModelAndInstallKnowledgeBase } from './select_model_and_install_knowledge_base'; + +jest.mock('../hooks/use_inference_endpoints', () => ({ + useInferenceEndpoints: () => ({ + inferenceEndpoints: [{ inference_id: 'id1' }, { inference_id: 'id2' }], + isLoading: false, + }), +})); + +jest.mock('../utils/get_model_options_for_inference_endpoints', () => ({ + getModelOptionsForInferenceEndpoints: ({ endpoints }: { endpoints: any[] }): ModelOptionsData[] => + endpoints.map((e, i) => ({ + key: e.inference_id, + label: `Label${i + 1}`, + description: `Desc${i + 1}`, + })), +})); + +const onInstall = jest.fn(); + +function renderComponent() { + return render(); +} + +describe('SelectModelAndInstallKnowledgeBase', () => { + beforeEach(() => { + renderComponent(); + }); + + it('renders heading, subtitle, and the dropdown with a default model selected', () => { + expect(screen.getByRole('heading', { level: 3 })).toHaveTextContent( + 'Get started by setting up the Knowledge Base' 
+ ); + + const learnMore = screen.getByRole('link', { name: /Learn more/i }); + expect(learnMore).toHaveAttribute('href', expect.stringContaining('ml-nlp-built-in-models')); + + expect(screen.getByText('Label1')).toBeInTheDocument(); + }); + + it('calls onInstall with default id when the install button is clicked', () => { + const installBtn = screen.getByRole('button', { name: /Install Knowledge Base/i }); + fireEvent.click(installBtn); + expect(onInstall).toHaveBeenCalledWith('id1'); + }); + + it('allows changing selection and installing the KB with the inference_id for the new model', async () => { + const defaultSelection = screen.getByText('Label1'); + fireEvent.click(defaultSelection); + + const nextSelection = screen.getByText('Label2'); + await waitFor(() => nextSelection); + fireEvent.click(nextSelection); + + expect(nextSelection).toBeInTheDocument(); + + fireEvent.click(screen.getByRole('button', { name: /Install Knowledge Base/i })); + expect(onInstall).toHaveBeenCalledWith('id2'); + }); +}); diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/select_model_and_install_knowledge_base.tsx b/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/select_model_and_install_knowledge_base.tsx new file mode 100644 index 0000000000000..6534cdfeca9a3 --- /dev/null +++ b/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/select_model_and_install_knowledge_base.tsx @@ -0,0 +1,179 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React, { useEffect, useState } from 'react'; +import { css } from '@emotion/css'; +import { + EuiButton, + EuiFlexGroup, + EuiFlexItem, + EuiIcon, + EuiLink, + EuiSpacer, + EuiSuperSelect, + EuiText, + euiCanAnimate, + useEuiTheme, +} from '@elastic/eui'; +import { i18n } from '@kbn/i18n'; +import { isHttpFetchError } from '@kbn/core-http-browser'; +import { useInferenceEndpoints } from '../hooks/use_inference_endpoints'; +import { + ModelOptionsData, + getModelOptionsForInferenceEndpoints, +} from '../utils/get_model_options_for_inference_endpoints'; +import { fadeInAnimation } from '../chat/welcome_message_connectors'; + +interface SelectModelAndInstallKnowledgeBaseProps { + onInstall: (inferenceId: string) => Promise; + isInstalling: boolean; +} + +export function SelectModelAndInstallKnowledgeBase({ + onInstall, + isInstalling, +}: SelectModelAndInstallKnowledgeBaseProps) { + const { euiTheme } = useEuiTheme(); + + const fadeInClassName = css` + ${euiCanAnimate} { + animation: ${fadeInAnimation} ${euiTheme.animation.normal} ease-in-out; + } + `; + + const [selectedInferenceId, setSelectedInferenceId] = useState(''); + + const { inferenceEndpoints, isLoading: isLoadingEndpoints, error } = useInferenceEndpoints(); + + useEffect(() => { + if (!selectedInferenceId && inferenceEndpoints.length) { + setSelectedInferenceId(inferenceEndpoints[0].inference_id); + } + }, [inferenceEndpoints, selectedInferenceId]); + + const handleInstall = () => { + if (selectedInferenceId) { + onInstall(selectedInferenceId); + } + }; + + const modelOptions: ModelOptionsData[] = getModelOptionsForInferenceEndpoints({ + endpoints: inferenceEndpoints, + }); + + const superSelectOptions = modelOptions.map((option: ModelOptionsData) => ({ + value: option.key, + inputDisplay: option.label, + dropdownDisplay: ( +
+ {option.label} + + {option.description} + +
+ ), + })); + + if (error) { + const isForbiddenError = + isHttpFetchError(error) && (error.body as { statusCode: number }).statusCode === 403; + + return ( +
+ + + + + + + {isForbiddenError + ? i18n.translate( + 'xpack.aiAssistant.knowledgeBase.inferenceEndpointsForbiddenTextLabel', + { + defaultMessage: 'Required privileges to fetch available models are missing', + } + ) + : i18n.translate( + 'xpack.aiAssistant.knowledgeBase.inferenceEndpointsErrorTextLabel', + { + defaultMessage: 'Could not load models', + } + )} + + + +
+ ); + } + + return ( + <> + +

+ {i18n.translate('xpack.aiAssistant.knowledgeBase.getStarted', { + defaultMessage: 'Get started by setting up the Knowledge Base', + })} +

+
+ + + + + {i18n.translate('xpack.aiAssistant.knowledgeBase.chooseModelSubtitle', { + defaultMessage: "Choose the default language model for the Assistant's responses.", + })}{' '} + + {i18n.translate('xpack.aiAssistant.knowledgeBase.subtitleLearnMore', { + defaultMessage: 'Learn more', + })} + + + + + + + + setSelectedInferenceId(value)} + disabled={isInstalling} + data-test-subj="observabilityAiAssistantKnowledgeBaseModelDropdown" + /> + + + + + + + + + {i18n.translate('xpack.aiAssistant.knowledgeBase.installButtonLabel', { + defaultMessage: 'Install Knowledge Base', + })} + + + + + ); +} diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/setting_up_knowledge_base.tsx b/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/setting_up_knowledge_base.tsx new file mode 100644 index 0000000000000..72f3851ad1632 --- /dev/null +++ b/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/setting_up_knowledge_base.tsx @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import React from 'react'; +import { i18n } from '@kbn/i18n'; +import { EuiButtonEmpty, EuiSpacer, EuiText } from '@elastic/eui'; + +export const SettingUpKnowledgeBase = () => ( + <> + + {i18n.translate('xpack.aiAssistant.welcomeMessage.weAreSettingUpTextLabel', { + defaultMessage: + 'We are setting up your knowledge base. This may take a few minutes. 
You can continue to use the Assistant while this process is underway.', + })} + + + + + + {i18n.translate('xpack.aiAssistant.welcomeMessage.div.settingUpKnowledgeBaseLabel', { + defaultMessage: 'Setting up Knowledge Base', + })} + + +); diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/welcome_message_knowledge_base.test.tsx b/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/welcome_message_knowledge_base.test.tsx new file mode 100644 index 0000000000000..9864d31ab99f7 --- /dev/null +++ b/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/welcome_message_knowledge_base.test.tsx @@ -0,0 +1,278 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import React from 'react'; +import { act, render, screen } from '@testing-library/react'; +import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/public'; +import { WelcomeMessageKnowledgeBase } from './welcome_message_knowledge_base'; +import type { UseKnowledgeBaseResult } from '../hooks/use_knowledge_base'; + +jest.mock('../hooks/use_inference_endpoints', () => ({ + useInferenceEndpoints: () => ({ + inferenceEndpoints: [{ inference_id: 'id1' }, { inference_id: 'id2' }], + isLoading: false, + }), +})); + +function createMockKnowledgeBase( + partial: Partial = {} +): UseKnowledgeBaseResult { + return { + isInstalling: partial.isInstalling ?? false, + isPolling: partial.isPolling ?? false, + install: partial.install ?? (async (_id: string) => {}), + warmupModel: partial.warmupModel ?? (async (_id: string) => {}), + isWarmingUpModel: partial.isWarmingUpModel ?? false, + status: partial.status ?? 
{ + value: { + enabled: true, + errorMessage: undefined, + kbState: KnowledgeBaseState.NOT_INSTALLED, + concreteWriteIndex: undefined, + currentInferenceId: undefined, + isReIndexing: false, + }, + loading: false, + error: undefined, + refresh: jest.fn(), + }, + }; +} + +function renderComponent(kb: UseKnowledgeBaseResult) { + return render(); +} + +describe('WelcomeMessageKnowledgeBase', () => { + afterEach(() => { + jest.clearAllMocks(); + }); + + it(`renders the "not set up" state if server returns errorMessage (no model exists) but user hasn't started installing`, async () => { + const kb = createMockKnowledgeBase({ + isInstalling: false, + install: jest.fn(async (_id: string) => {}), + status: { + value: { + enabled: true, + kbState: KnowledgeBaseState.NOT_INSTALLED, + errorMessage: 'no model', + concreteWriteIndex: undefined, + currentInferenceId: undefined, + isReIndexing: false, + }, + loading: false, + refresh: jest.fn(), + }, + }); + + renderComponent(kb); + + expect(screen.getByText(/Get started by setting up the Knowledge Base/i)).toBeInTheDocument(); + expect(screen.getByText(/Install Knowledge Base/i)).toBeInTheDocument(); + expect(screen.queryByText(/Inspect/i)).toBeNull(); + }); + + it('renders install message if isInstalling', () => { + const kb = createMockKnowledgeBase({ + isInstalling: true, + status: { + value: { + enabled: true, + endpoint: { inference_id: 'inference_id' }, + kbState: KnowledgeBaseState.DEPLOYING_MODEL, + concreteWriteIndex: 'my-index', + currentInferenceId: 'inference_id', + isReIndexing: false, + }, + loading: false, + refresh: jest.fn(), + }, + }); + renderComponent(kb); + + expect(screen.getByText(/We are setting up your knowledge base/i)).toBeInTheDocument(); + expect(screen.getByText(/Setting up Knowledge base/i)).toBeInTheDocument(); + }); + + it('renders the success banner after a transition from installing to not installing with no error', async () => { + // 1) Start in an installing state + let kb = 
createMockKnowledgeBase({ + isInstalling: true, + isPolling: true, + status: { + value: { + enabled: true, + kbState: KnowledgeBaseState.NOT_INSTALLED, + concreteWriteIndex: 'my-index', + currentInferenceId: 'inference_id', + isReIndexing: false, + }, + loading: false, + refresh: jest.fn(), + }, + }); + const { rerender } = renderComponent(kb); + + // Should not see success banner initially + expect(screen.queryByText(/Knowledge base successfully installed/i)).toBeNull(); + + kb = { + ...kb, + isInstalling: false, + isPolling: false, + status: { + ...kb.status, + value: { + ...kb.status.value, + enabled: true, + endpoint: { inference_id: 'inference_id' }, + kbState: KnowledgeBaseState.READY, + concreteWriteIndex: 'my-index', + currentInferenceId: 'inference_id', + isReIndexing: false, + }, + loading: false, + refresh: jest.fn(), + }, + }; + + await act(async () => { + rerender(); + }); + + // Now we should see success banner + expect(screen.getByText(/Knowledge base successfully installed/i)).toBeInTheDocument(); + }); + + it('renders "We are setting up your knowledge base" with the inspect button', () => { + const kb = createMockKnowledgeBase({ + isInstalling: false, + isPolling: true, + status: { + value: { + enabled: true, + endpoint: { inference_id: 'inference_id' }, + kbState: KnowledgeBaseState.DEPLOYING_MODEL, + concreteWriteIndex: 'my-index', + currentInferenceId: 'inference_id', + isReIndexing: false, + modelStats: { + deployment_stats: { + state: 'starting', + deployment_id: 'deployment_id', + model_id: 'model_id', + nodes: [], + peak_throughput_per_minute: 0, + priority: 'normal', + start_time: 0, + }, + }, + }, + loading: false, + refresh: jest.fn(), + }, + }); + renderComponent(kb); + + expect(screen.getByText(/We are setting up your knowledge base/i)).toBeInTheDocument(); + expect(screen.getByText(/Inspect/i)).toBeInTheDocument(); + }); + + it('renders "Knowledge Base setup failed" with inspect issues', () => { + const kb = createMockKnowledgeBase({ + 
isInstalling: false, + isPolling: true, + status: { + value: { + enabled: true, + endpoint: { inference_id: 'inference_id' }, + kbState: KnowledgeBaseState.ERROR, + concreteWriteIndex: 'my-index', + currentInferenceId: 'inference_id', + isReIndexing: false, + modelStats: { + deployment_stats: { + reason: 'model deployment failed', + state: 'failed', + deployment_id: 'deployment_id', + model_id: 'model_id', + nodes: [], + peak_throughput_per_minute: 0, + priority: 'normal', + start_time: 0, + }, + }, + }, + loading: false, + refresh: jest.fn(), + }, + }); + renderComponent(kb); + + expect( + screen.getByText(/Knowledge Base setup failed. Check 'Inspect' for details./i) + ).toBeInTheDocument(); + expect(screen.getAllByText(/Inspect/i)).toHaveLength(2); + }); + + it('renders "We are setting up your knowledge base" if model is not ready but endpoint exists', () => { + const kb = createMockKnowledgeBase({ + isPolling: true, + status: { + value: { + enabled: true, + endpoint: { inference_id: 'inference_id' }, + kbState: KnowledgeBaseState.DEPLOYING_MODEL, + concreteWriteIndex: 'my-index', + currentInferenceId: 'inference_id', + isReIndexing: false, + modelStats: { + deployment_stats: { + reason: 'model deployment paused', + deployment_id: 'deployment_id', + model_id: 'model_id', + nodes: [], + peak_throughput_per_minute: 0, + priority: 'normal', + start_time: 0, + }, + }, + }, + loading: false, + refresh: jest.fn(), + }, + }); + renderComponent(kb); + + expect(screen.getByText(/We are setting up your knowledge base/i)).toBeInTheDocument(); + expect(screen.getByText(/Inspect/i)).toBeInTheDocument(); + }); + + it('renders nothing if the knowledge base is already installed', () => { + const kb = createMockKnowledgeBase({ + status: { + value: { + kbState: KnowledgeBaseState.READY, + endpoint: { inference_id: 'inference_id' }, + enabled: true, + concreteWriteIndex: 'my-index', + currentInferenceId: 'inference_id', + isReIndexing: false, + }, + loading: false, + error: 
undefined, + refresh: jest.fn(), + }, + }); + renderComponent(kb); + + expect(screen.queryByText(/We are setting up your knowledge base/i)).toBeNull(); + expect(screen.queryByText(/Get started by setting up the Knowledge Base/i)).toBeNull(); + expect(screen.queryByText(/Knowledge base successfully installed/i)).toBeNull(); + }); +}); diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/welcome_message_knowledge_base.tsx b/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/welcome_message_knowledge_base.tsx new file mode 100644 index 0000000000000..909856196437e --- /dev/null +++ b/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/welcome_message_knowledge_base.tsx @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React, { useEffect, useState } from 'react'; +import { i18n } from '@kbn/i18n'; +import { EuiFlexGroup, EuiFlexItem, EuiIcon, EuiText } from '@elastic/eui'; +import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/public'; +import usePrevious from 'react-use/lib/usePrevious'; +import { UseKnowledgeBaseResult } from '../hooks'; +import { KnowledgeBaseInstallationStatusPanel } from './knowledge_base_installation_status_panel'; +import { SettingUpKnowledgeBase } from './setting_up_knowledge_base'; + +export function WelcomeMessageKnowledgeBase({ + knowledgeBase, +}: { + knowledgeBase: UseKnowledgeBaseResult; +}) { + const prevIsInstalling = usePrevious(knowledgeBase.isInstalling || knowledgeBase.isPolling); + const [showSuccessBanner, setShowSuccessBanner] = useState(false); + + useEffect(() => { + if (prevIsInstalling) { + setShowSuccessBanner(true); + } + }, [knowledgeBase.isInstalling, prevIsInstalling]); + + if (knowledgeBase.isInstalling) { + return ; + } + + if (knowledgeBase.status.value?.kbState === KnowledgeBaseState.READY) { + return showSuccessBanner ? ( +
+ + + + + + + {i18n.translate( + 'xpack.aiAssistant.welcomeMessage.knowledgeBaseSuccessfullyInstalledLabel', + { defaultMessage: 'Knowledge base successfully installed' } + )} + + + +
+ ) : null; + } + + return ; +} diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message_knowledge_base_setup_error_panel.tsx b/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/welcome_message_knowledge_base_setup_error_panel.tsx similarity index 69% rename from x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message_knowledge_base_setup_error_panel.tsx rename to x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/welcome_message_knowledge_base_setup_error_panel.tsx index f7267d6d21855..92d14a7109d6f 100644 --- a/x-pack/platform/packages/shared/kbn-ai-assistant/src/chat/welcome_message_knowledge_base_setup_error_panel.tsx +++ b/x-pack/platform/packages/shared/kbn-ai-assistant/src/knowledge_base/welcome_message_knowledge_base_setup_error_panel.tsx @@ -33,13 +33,16 @@ export function WelcomeMessageKnowledgeBaseSetupErrorPanel({ onRetryInstall, }: { knowledgeBase: UseKnowledgeBaseResult; - onRetryInstall: () => void; + onRetryInstall: (inferenceId: string) => void; }) { const { http } = useKibana().services; const modelId = knowledgeBase.status.value?.endpoint?.service_settings?.model_id; - const deploymentState = knowledgeBase.status.value?.model_stats?.deployment_state; - const allocationState = knowledgeBase.status.value?.model_stats?.allocation_state; + const deploymentState = knowledgeBase.status.value?.modelStats?.deployment_stats?.state; + const deploymentReason = knowledgeBase.status.value?.modelStats?.deployment_stats?.reason; + const allocationState = + knowledgeBase.status.value?.modelStats?.deployment_stats?.allocation_status?.state; + const inferenceId = knowledgeBase.status.value?.modelStats?.deployment_stats?.deployment_id; return (
    - {!deploymentState ? ( -
  • - {' '} - {modelId}, - }} - /> -
  • + {deploymentState ? ( + <> +
  • + {' '} + {modelId}, + }} + /> +
  • +
  • + {' '} + {modelId}, + deploymentState: {deploymentState}, + }} + /> +
  • + ) : null} - {deploymentState && deploymentState !== 'started' ? ( + {deploymentReason ? (
  • {' '} {modelId}, - deploymentState: {deploymentState}, + reason: {deploymentReason}, }} />
  • ) : null} - {allocationState && allocationState !== 'fully_allocated' ? ( + {allocationState ? (
  • {' '} onRetryInstall(inferenceId!)} // TODO: check behaviour in error state > {i18n.translate( 'xpack.aiAssistant.welcomeMessageKnowledgeBaseSetupErrorPanel.retryInstallingLinkLabel', diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/utils/get_model_options_for_inference_endpoints.test.ts b/x-pack/platform/packages/shared/kbn-ai-assistant/src/utils/get_model_options_for_inference_endpoints.test.ts new file mode 100644 index 0000000000000..a0d801f465bae --- /dev/null +++ b/x-pack/platform/packages/shared/kbn-ai-assistant/src/utils/get_model_options_for_inference_endpoints.test.ts @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { + e5SmallDescription, + e5SmallTitle, + elserDescription, + elserTitle, + getModelOptionsForInferenceEndpoints, + ModelOptionsData, +} from './get_model_options_for_inference_endpoints'; +import type { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils'; + +describe('getModelOptionsForInferenceEndpoints', () => { + it('maps known inference endpoints to user-friendly titles and descriptions', () => { + const endpoints = [ + { inference_id: '.elser-2-elasticsearch' }, + { inference_id: '.multilingual-e5-small-elasticsearch' }, + ] as InferenceAPIConfigResponse[]; + + const options: ModelOptionsData[] = getModelOptionsForInferenceEndpoints({ endpoints }); + + expect(options).toEqual([ + { + key: '.elser-2-elasticsearch', + label: elserTitle, + description: elserDescription, + }, + { + key: '.multilingual-e5-small-elasticsearch', + label: e5SmallTitle, + description: e5SmallDescription, + }, + ]); + }); +}); diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/src/utils/get_model_options_for_inference_endpoints.ts 
b/x-pack/platform/packages/shared/kbn-ai-assistant/src/utils/get_model_options_for_inference_endpoints.ts new file mode 100644 index 0000000000000..ef0f9030333a5 --- /dev/null +++ b/x-pack/platform/packages/shared/kbn-ai-assistant/src/utils/get_model_options_for_inference_endpoints.ts @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { i18n } from '@kbn/i18n'; +import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils'; + +export interface ModelOptionsData { + key: string; + label: string; + description: string; +} + +export const elserTitle = i18n.translate( + 'xpack.aiAssistant.welcomeMessage.knowledgeBase.model.elserTitle', + { + defaultMessage: 'ELSER v2 (English-only)', + } +); + +export const elserDescription = i18n.translate( + 'xpack.aiAssistant.welcomeMessage.knowledgeBase.model.elserDescription', + { + defaultMessage: + 'Focus on query meaning, not just keyword matching, using learned associations between terms. It delivers more relevant, context-aware results and works out of the box with no need for deep machine learning expertise.', + } +); + +export const e5SmallTitle = i18n.translate( + 'xpack.aiAssistant.welcomeMessage.knowledgeBase.model.e5smallTitle', + { + defaultMessage: 'E5-small (multilingual)', + } +); + +export const e5SmallDescription = i18n.translate( + 'xpack.aiAssistant.welcomeMessage.knowledgeBase.model.e5smallDescription', + { + defaultMessage: + 'E5 is an NLP model by Elastic designed to enhance multilingual semantic search by focusing on query context rather than keywords. 
E5-small is a cross-platform version compatible with different hardware configurations.', + } +); + +const e5LargeTitle = i18n.translate( + 'xpack.aiAssistant.welcomeMessage.knowledgeBase.model.e5largeTitle', + { + defaultMessage: 'E5-large (multilingual)', + } +); + +const e5LargeDescription = i18n.translate( + 'xpack.aiAssistant.welcomeMessage.knowledgeBase.model.e5largeDescription', + { + defaultMessage: + 'E5 is an NLP model by Elastic designed to enhance multilingual semantic search by focusing on query context rather than keywords. E5-large is an optimized version for Intel® silicon.', + } +); + +const PRECONFIGURED_INFERENCE_ENDPOINT_METADATA: Record< + string, + { title: string; description: string } +> = { + '.elser-2-elasticsearch': { + title: elserTitle, + description: elserDescription, + }, + '.elser-v2-elastic': { + title: elserTitle, + description: elserDescription, + }, + '.multilingual-e5-small-elasticsearch': { + title: e5SmallTitle, + description: e5SmallDescription, + }, + '.multilingual-e5-large-elasticsearch': { + title: e5LargeTitle, + description: e5LargeDescription, + }, +}; + +export const getModelOptionsForInferenceEndpoints = ({ + endpoints, +}: { + endpoints: InferenceAPIConfigResponse[]; +}): ModelOptionsData[] => { + // TODO: add logic to show the EIS models if EIS is enabled, if not show the other models + const preConfiguredEndpoints = endpoints + .map((endpoint) => { + const meta = PRECONFIGURED_INFERENCE_ENDPOINT_METADATA[endpoint.inference_id]; + + if (!meta) { + return undefined; + } + + return { + key: endpoint.inference_id, + label: meta.title, + description: meta.description, + }; + }) + .filter(Boolean) as ModelOptionsData[]; + + return preConfiguredEndpoints; +}; diff --git a/x-pack/platform/packages/shared/kbn-ai-assistant/tsconfig.json b/x-pack/platform/packages/shared/kbn-ai-assistant/tsconfig.json index 7ad67352664ee..cd3e5185a8924 100644 --- a/x-pack/platform/packages/shared/kbn-ai-assistant/tsconfig.json +++ 
b/x-pack/platform/packages/shared/kbn-ai-assistant/tsconfig.json @@ -43,5 +43,6 @@ "@kbn/ai-assistant-icon", "@kbn/datemath", "@kbn/security-plugin-types-common", + "@kbn/ml-trained-models-utils", ] } diff --git a/x-pack/platform/plugins/private/translations/translations/fr-FR.json b/x-pack/platform/plugins/private/translations/translations/fr-FR.json index 3ff7435ed9ff6..1ecf078cf6c44 100644 --- a/x-pack/platform/plugins/private/translations/translations/fr-FR.json +++ b/x-pack/platform/plugins/private/translations/translations/fr-FR.json @@ -10247,7 +10247,6 @@ "xpack.aiAssistant.chatTimeline.actions.inspectPrompt": "Inspecter l'invite", "xpack.aiAssistant.chatTimeline.messages.elasticAssistant.label": "Assistant d'Elastic", "xpack.aiAssistant.chatTimeline.messages.user.label": "Vous", - "xpack.aiAssistant.checkingKbAvailability": "Vérification de la disponibilité de la base de connaissances", "xpack.aiAssistant.conversationList.dateGroupTitle.lastMonth": "Mois dernier", "xpack.aiAssistant.conversationList.dateGroupTitle.lastWeek": "Semaine dernière", "xpack.aiAssistant.conversationList.dateGroupTitle.older": "Plus ancien", @@ -10264,11 +10263,8 @@ "xpack.aiAssistant.couldNotFindConversationTitle": "Conversation introuvable", "xpack.aiAssistant.disclaimer.disclaimerLabel": "Ce chat est alimenté par une intégration avec votre fournisseur LLM. Il arrive que les grands modèles de langage (LLM) présentent comme correctes des informations incorrectes. 
Elastic prend en charge la configuration ainsi que la connexion au fournisseur LLM et à votre base de connaissances, mais n'est pas responsable des réponses fournies par le LLM.", "xpack.aiAssistant.emptyConversationTitle": "Nouvelle conversation", - "xpack.aiAssistant.errorSettingUpInferenceEndpoint": "Impossible de créer le point de terminaison d'inférence", "xpack.aiAssistant.errorUpdatingConversation": "Impossible de mettre à jour la conversation", "xpack.aiAssistant.executedFunctionFailureEvent": "impossible d'exécuter la fonction {functionName}", - "xpack.aiAssistant.failedToGetStatus": "Échec de l'obtention du statut du modèle.", - "xpack.aiAssistant.failedToSetupKnowledgeBase": "Échec de la configuration de la base de connaissances.", "xpack.aiAssistant.flyout.confirmDeleteButtonText": "Supprimer la conversation", "xpack.aiAssistant.flyout.confirmDeleteConversationContent": "Cette action ne peut pas être annulée.", "xpack.aiAssistant.flyout.confirmDeleteConversationTitle": "Supprimer cette conversation ?", @@ -10280,13 +10276,10 @@ "xpack.aiAssistant.incorrectLicense.title": "Mettez votre licence à niveau", "xpack.aiAssistant.initialSetupPanel.setupConnector.buttonLabel": "Configurer un connecteur GenAI", "xpack.aiAssistant.initialSetupPanel.setupConnector.description2": "Commencez à travailler avec l'assistant AI Elastic en configurant un connecteur pour votre fournisseur d'IA. Le modèle doit prendre en charge les appels de fonction. 
Lorsque vous utilisez OpenAI ou Azure, nous vous recommandons d'utiliser GPT4.", - "xpack.aiAssistant.installingKb": "Configuration de la base de connaissances", "xpack.aiAssistant.newChatButton": "Nouvelle conversation", - "xpack.aiAssistant.poweredByModel": "Alimenté par {model}", "xpack.aiAssistant.prompt.placeholder": "Envoyer un message à l'assistant", "xpack.aiAssistant.promptEditorNaturalLanguage.euiSelectable.selectAnOptionLabel": "Sélectionner une option", "xpack.aiAssistant.settingsPage.goToConnectorsButtonLabel": "Gérer les connecteurs", - "xpack.aiAssistant.setupKb": "Améliorez votre expérience en configurant la base de connaissances.", "xpack.aiAssistant.simulatedFunctionCallingCalloutLabel": "L'appel de fonctions simulées est activé. Vous risquez de voir les performances se dégrader.", "xpack.aiAssistant.suggestedFunctionEvent": "a demandé la fonction {functionName}", "xpack.aiAssistant.technicalPreviewBadgeDescription": "GTP4 est nécessaire pour bénéficier d'une meilleure expérience avec les appels de fonctions (par exemple lors de la réalisation d'analyse de la cause d'un problème, de la visualisation de données et autres). 
GPT3.5 peut fonctionner pour certains des workflows les plus simples comme les explications d'erreurs ou pour bénéficier d'une expérience comparable à ChatGPT au sein de Kibana à partir du moment où les appels de fonctions ne sont pas fréquents.", @@ -10300,12 +10293,10 @@ "xpack.aiAssistant.welcomeMessage.modelIsNotDeployedLabel": "Le modèle {modelId} n'est pas déployé", "xpack.aiAssistant.welcomeMessage.modelIsNotFullyAllocatedLabel": "L'état d'allocation de {modelId} est {allocationState}", "xpack.aiAssistant.welcomeMessage.modelIsNotStartedLabel": "L'état de déploiement de {modelId} est {deploymentState}", - "xpack.aiAssistant.welcomeMessage.retryButtonLabel": "Installer la base de connaissances", "xpack.aiAssistant.welcomeMessage.trainedModelsLinkLabel": "Modèles entraînés", "xpack.aiAssistant.welcomeMessage.weAreSettingUpTextLabel": "Nous configurons votre base de connaissances. Cette opération peut prendre quelques minutes. Vous pouvez continuer à utiliser l'Assistant lors de ce processus.", "xpack.aiAssistant.welcomeMessageConnectors.connectorsErrorTextLabel": "Impossible de charger les connecteurs", "xpack.aiAssistant.welcomeMessageConnectors.connectorsForbiddenTextLabel": "Vous n'avez pas les autorisations requises pour charger les connecteurs", - "xpack.aiAssistant.welcomeMessageKnowledgeBase.yourKnowledgeBaseIsNotSetUpCorrectlyLabel": "Votre base de connaissances n'a pas été configurée.", "xpack.aiAssistant.welcomeMessageKnowledgeBaseSetupErrorPanel.retryInstallingLinkLabel": "Réessayer l'installation", "xpack.aiops.actions.openChangePointInMlAppName": "Ouvrir dans AIOps Labs", "xpack.aiops.analysis.analysisTypeDipFallbackInfoTitle": "Meilleurs éléments pour la plage temporelle de référence de base", diff --git a/x-pack/platform/plugins/private/translations/translations/ja-JP.json b/x-pack/platform/plugins/private/translations/translations/ja-JP.json index b6e82eecc314d..c12fb7aff9817 100644 --- 
a/x-pack/platform/plugins/private/translations/translations/ja-JP.json +++ b/x-pack/platform/plugins/private/translations/translations/ja-JP.json @@ -10237,7 +10237,6 @@ "xpack.aiAssistant.chatTimeline.actions.inspectPrompt": "プロンプトを検査", "xpack.aiAssistant.chatTimeline.messages.elasticAssistant.label": "Elastic Assistant", "xpack.aiAssistant.chatTimeline.messages.user.label": "あなた", - "xpack.aiAssistant.checkingKbAvailability": "ナレッジベースの利用可能性を確認中", "xpack.aiAssistant.conversationList.dateGroupTitle.lastMonth": "先月", "xpack.aiAssistant.conversationList.dateGroupTitle.lastWeek": "先週", "xpack.aiAssistant.conversationList.dateGroupTitle.older": "古い", @@ -10254,11 +10253,8 @@ "xpack.aiAssistant.couldNotFindConversationTitle": "会話が見つかりません", "xpack.aiAssistant.disclaimer.disclaimerLabel": "この会話は、LLMプロバイダーとの統合によって提供されています。LLMは、正しくない情報を正しい情報であるかのように表示する場合があることが知られています。Elasticは、構成やLLMプロバイダーへの接続、お客様のナレッジベースへの接続はサポートしますが、LLMの応答については責任を負いません。", "xpack.aiAssistant.emptyConversationTitle": "新しい会話", - "xpack.aiAssistant.errorSettingUpInferenceEndpoint": "推論エンドポイントを作成できませんでした", "xpack.aiAssistant.errorUpdatingConversation": "会話を更新できませんでした", "xpack.aiAssistant.executedFunctionFailureEvent": "関数{functionName}の実行に失敗しました", - "xpack.aiAssistant.failedToGetStatus": "モデルステータスを取得できませんでした。", - "xpack.aiAssistant.failedToSetupKnowledgeBase": "ナレッジベースをセットアップできませんでした。", "xpack.aiAssistant.flyout.confirmDeleteButtonText": "会話を削除", "xpack.aiAssistant.flyout.confirmDeleteConversationContent": "この操作は元に戻すことができません。", "xpack.aiAssistant.flyout.confirmDeleteConversationTitle": "この会話を削除しますか?", @@ -10270,13 +10266,10 @@ "xpack.aiAssistant.incorrectLicense.title": "ライセンスをアップグレード", "xpack.aiAssistant.initialSetupPanel.setupConnector.buttonLabel": "GenAIコネクターをセットアップ", "xpack.aiAssistant.initialSetupPanel.setupConnector.description2": "Elastic AI Assistantの使用を開始するには、AIプロバイダーのコネクターを設定します。モデルは関数呼び出しをサポートしている必要があります。OpenAIまたはAzureを使用するときには、GPT4を使用することをお勧めします。", - "xpack.aiAssistant.installingKb": 
"ナレッジベースをセットアップ中", "xpack.aiAssistant.newChatButton": "新しい会話", - "xpack.aiAssistant.poweredByModel": "{model}で駆動", "xpack.aiAssistant.prompt.placeholder": "アシスタントにメッセージを送信", "xpack.aiAssistant.promptEditorNaturalLanguage.euiSelectable.selectAnOptionLabel": "オプションを選択", "xpack.aiAssistant.settingsPage.goToConnectorsButtonLabel": "コネクターを管理", - "xpack.aiAssistant.setupKb": "ナレッジベースを設定することで、エクスペリエンスが改善されます。", "xpack.aiAssistant.simulatedFunctionCallingCalloutLabel": "シミュレートされた関数呼び出しが有効です。パフォーマンスが劣化する場合があります。", "xpack.aiAssistant.suggestedFunctionEvent": "関数{functionName}を要求しました", "xpack.aiAssistant.technicalPreviewBadgeDescription": "関数呼び出し(根本原因分析やデータの視覚化など)を使用する際に、より一貫性のあるエクスペリエンスを実現するために、GPT4が必要です。GPT3.5は、エラーの説明などのシンプルなワークフローの一部や、頻繁な関数呼び出しの使用が必要とされないKibana内のエクスペリエンスなどのChatGPTで機能します。", @@ -10290,12 +10283,10 @@ "xpack.aiAssistant.welcomeMessage.modelIsNotDeployedLabel": "モデル\"{modelId}\"はデプロイされていません", "xpack.aiAssistant.welcomeMessage.modelIsNotFullyAllocatedLabel": "\"{modelId}\"の割り当て状態は{allocationState}です", "xpack.aiAssistant.welcomeMessage.modelIsNotStartedLabel": "\"{modelId}\"のデプロイ状態は{deploymentState}です", - "xpack.aiAssistant.welcomeMessage.retryButtonLabel": "ナレッジベースをインストール", "xpack.aiAssistant.welcomeMessage.trainedModelsLinkLabel": "学習済みモデル", "xpack.aiAssistant.welcomeMessage.weAreSettingUpTextLabel": "ナレッジベースをセットアップしています。これには数分かかる場合があります。この処理の実行中には、アシスタントを使用し続けることができます。", "xpack.aiAssistant.welcomeMessageConnectors.connectorsErrorTextLabel": "コネクターを読み込めませんでした", "xpack.aiAssistant.welcomeMessageConnectors.connectorsForbiddenTextLabel": "コネクターを取得するために必要な権限が不足しています", - "xpack.aiAssistant.welcomeMessageKnowledgeBase.yourKnowledgeBaseIsNotSetUpCorrectlyLabel": "ナレッジベースはセットアップされていません。", "xpack.aiAssistant.welcomeMessageKnowledgeBaseSetupErrorPanel.retryInstallingLinkLabel": "インストールを再試行", "xpack.aiops.actions.openChangePointInMlAppName": "AIOps Labsで開く", "xpack.aiops.analysis.analysisTypeDipFallbackInfoTitle": "ベースライン時間範囲の上位のアイテム", diff --git 
a/x-pack/platform/plugins/private/translations/translations/zh-CN.json b/x-pack/platform/plugins/private/translations/translations/zh-CN.json index 4ea06b28f0f38..bd4c39d530643 100644 --- a/x-pack/platform/plugins/private/translations/translations/zh-CN.json +++ b/x-pack/platform/plugins/private/translations/translations/zh-CN.json @@ -10255,7 +10255,6 @@ "xpack.aiAssistant.chatTimeline.actions.inspectPrompt": "检查提示", "xpack.aiAssistant.chatTimeline.messages.elasticAssistant.label": "Elastic 助手", "xpack.aiAssistant.chatTimeline.messages.user.label": "您", - "xpack.aiAssistant.checkingKbAvailability": "正在检查知识库的可用性", "xpack.aiAssistant.conversationList.dateGroupTitle.lastMonth": "上个月", "xpack.aiAssistant.conversationList.dateGroupTitle.lastWeek": "上周", "xpack.aiAssistant.conversationList.dateGroupTitle.older": "更早", @@ -10272,11 +10271,8 @@ "xpack.aiAssistant.couldNotFindConversationTitle": "未找到对话", "xpack.aiAssistant.disclaimer.disclaimerLabel": "通过集成 LLM 提供商来支持此对话。众所周知,LLM 有时会提供错误信息,好像它是正确的。Elastic 支持配置并连接到 LLM 提供商和知识库,但不对 LLM 响应负责。", "xpack.aiAssistant.emptyConversationTitle": "新对话", - "xpack.aiAssistant.errorSettingUpInferenceEndpoint": "无法创建推理终端", "xpack.aiAssistant.errorUpdatingConversation": "无法更新对话", "xpack.aiAssistant.executedFunctionFailureEvent": "无法执行函数 {functionName}", - "xpack.aiAssistant.failedToGetStatus": "无法获取模型状态。", - "xpack.aiAssistant.failedToSetupKnowledgeBase": "无法设置知识库。", "xpack.aiAssistant.flyout.confirmDeleteButtonText": "删除对话", "xpack.aiAssistant.flyout.confirmDeleteConversationContent": "此操作无法撤消。", "xpack.aiAssistant.flyout.confirmDeleteConversationTitle": "删除此对话?", @@ -10288,13 +10284,10 @@ "xpack.aiAssistant.incorrectLicense.title": "升级您的许可证", "xpack.aiAssistant.initialSetupPanel.setupConnector.buttonLabel": "设置 GenAI 连接器", "xpack.aiAssistant.initialSetupPanel.setupConnector.description2": "通过为您的 AI 提供商设置连接器,开始使用 Elastic AI 助手。此模型需要支持函数调用。使用 OpenAI 或 Azure 时,建议使用 GPT4。", - "xpack.aiAssistant.installingKb": "正在设置知识库", - 
"xpack.aiAssistant.newChatButton": "新对话", - "xpack.aiAssistant.poweredByModel": "由 {model} 提供支持", + "xpack.aiAssistant.newChatButton": "新聊天", "xpack.aiAssistant.prompt.placeholder": "向助手发送消息", "xpack.aiAssistant.promptEditorNaturalLanguage.euiSelectable.selectAnOptionLabel": "选择选项", "xpack.aiAssistant.settingsPage.goToConnectorsButtonLabel": "管理连接器", - "xpack.aiAssistant.setupKb": "通过设置知识库来改进体验。", "xpack.aiAssistant.simulatedFunctionCallingCalloutLabel": "模拟函数调用已启用。您可能会面临性能降级。", "xpack.aiAssistant.suggestedFunctionEvent": "已请求函数 {functionName}", "xpack.aiAssistant.technicalPreviewBadgeDescription": "需要 GPT4 以在使用函数调用时(例如,执行根本原因分析、数据可视化等时候)获得更加一致的体验。GPT3.5 可作用于某些更简单的工作流(如解释错误),或在 Kibana 中获得不需要频繁使用函数调用的与 ChatGPT 类似的体验。", @@ -10308,12 +10301,10 @@ "xpack.aiAssistant.welcomeMessage.modelIsNotDeployedLabel": "未部署模型 {modelId}", "xpack.aiAssistant.welcomeMessage.modelIsNotFullyAllocatedLabel": "{modelId} 的分配状态为 {allocationState}", "xpack.aiAssistant.welcomeMessage.modelIsNotStartedLabel": "{modelId} 的部署状态为 {deploymentState}", - "xpack.aiAssistant.welcomeMessage.retryButtonLabel": "安装知识库", "xpack.aiAssistant.welcomeMessage.trainedModelsLinkLabel": "已训练模型", "xpack.aiAssistant.welcomeMessage.weAreSettingUpTextLabel": "我们正在设置您的知识库。这可能需要若干分钟。此进程处于运行状态时,您可以继续使用该助手。", "xpack.aiAssistant.welcomeMessageConnectors.connectorsErrorTextLabel": "无法加载连接器", "xpack.aiAssistant.welcomeMessageConnectors.connectorsForbiddenTextLabel": "缺少获取连接器所需的权限", - "xpack.aiAssistant.welcomeMessageKnowledgeBase.yourKnowledgeBaseIsNotSetUpCorrectlyLabel": "尚未设置您的知识库。", "xpack.aiAssistant.welcomeMessageKnowledgeBaseSetupErrorPanel.retryInstallingLinkLabel": "重试安装", "xpack.aiops.actions.openChangePointInMlAppName": "在 Aiops 实验室中打开", "xpack.aiops.analysis.analysisTypeDipFallbackInfoTitle": "基线时间范围的主要项目", diff --git a/x-pack/platform/plugins/shared/inference/server/chat_complete/errors.ts b/x-pack/platform/plugins/shared/inference/server/chat_complete/errors.ts index 00ef2d90ca1f7..52655834b48c4 100644 --- 
a/x-pack/platform/plugins/shared/inference/server/chat_complete/errors.ts +++ b/x-pack/platform/plugins/shared/inference/server/chat_complete/errors.ts @@ -31,7 +31,7 @@ export function createTokenLimitReachedError( export function createToolNotFoundError(name: string): ChatCompletionToolNotFoundError { return new InferenceTaskError( ChatCompletionErrorCode.ToolNotFoundError, - `Tool ${name} called but was not available`, + `Tool "${name}" called but was not available`, { name, } diff --git a/x-pack/platform/plugins/shared/inference/server/util/validate_tool_calls.test.ts b/x-pack/platform/plugins/shared/inference/server/util/validate_tool_calls.test.ts index 8691a50cda207..6320fb9f85bab 100644 --- a/x-pack/platform/plugins/shared/inference/server/util/validate_tool_calls.test.ts +++ b/x-pack/platform/plugins/shared/inference/server/util/validate_tool_calls.test.ts @@ -53,7 +53,9 @@ describe('validateToolCalls', () => { }, }, }) - ).toThrowErrorMatchingInlineSnapshot(`"Tool my_unknown_function called but was not available"`); + ).toThrowErrorMatchingInlineSnapshot( + `"Tool \\"my_unknown_function\\" called but was not available"` + ); }); it('throws an error if invalid JSON was generated', () => { diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/common/conversation_complete.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/common/conversation_complete.ts index 644be8b26e33a..ba1836dacaea4 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/common/conversation_complete.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/common/conversation_complete.ts @@ -167,7 +167,7 @@ export function createInternalServerError( export function createFunctionNotFoundError(name: string) { return new ChatCompletionError( ChatCompletionErrorCode.FunctionNotFoundError, - `Function ${name} called but was not available` + `Function "${name}" called but was not available` ); } diff --git 
a/x-pack/platform/plugins/shared/observability_ai_assistant/common/index.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/common/index.ts index 54ad61b818c91..f0a19c7acab25 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/common/index.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/common/index.ts @@ -6,7 +6,13 @@ */ export type { Message, Conversation, KnowledgeBaseEntry, ConversationCreateRequest } from './types'; -export { KnowledgeBaseEntryRole, MessageRole, ConversationAccess } from './types'; +export { + KnowledgeBaseEntryRole, + MessageRole, + ConversationAccess, + KnowledgeBaseType, + KnowledgeBaseState, +} from './types'; export type { FunctionDefinition, CompatibleJSONSchema } from './functions/types'; export { FunctionVisibility } from './functions/function_visibility'; export { @@ -46,5 +52,3 @@ export { export { concatenateChatCompletionChunks } from './utils/concatenate_chat_completion_chunks'; export { ShortIdTable } from './utils/short_id_table'; - -export { KnowledgeBaseType } from './types'; diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/common/types.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/common/types.ts index a193a7193d73f..cedd996d7964b 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/common/types.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/common/types.ts @@ -103,6 +103,15 @@ export enum KnowledgeBaseType { Contextual = 'contextual', } +export enum KnowledgeBaseState { + NOT_INSTALLED = 'NOT_INSTALLED', + MODEL_PENDING_DEPLOYMENT = 'MODEL_PENDING_DEPLOYMENT', + DEPLOYING_MODEL = 'DEPLOYING_MODEL', + MODEL_PENDING_ALLOCATION = 'MODEL_PENDING_ALLOCATION', + READY = 'READY', + ERROR = 'ERROR', +} + export interface ObservabilityAIAssistantScreenContextRequest { starterPrompts?: StarterPrompt[]; screenDescription?: string; diff --git 
a/x-pack/platform/plugins/shared/observability_ai_assistant/public/index.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/public/index.ts index c99a4ce206659..22c11b79079b0 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/public/index.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/public/index.ts @@ -68,6 +68,7 @@ export { StreamingChatResponseEventType, ConversationAccess, KnowledgeBaseType, + KnowledgeBaseState, } from '../common'; export type { diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/functions/context.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/functions/context.ts index 4ccdc495435a9..185acbedeb992 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/functions/context.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/functions/context.ts @@ -55,6 +55,9 @@ export function registerContextFunction({ }; if (!isKnowledgeBaseReady) { + resources.logger.warn( + 'Knowledge base is not ready yet. Returning context function response without knowledge base entries.' 
+ ); return { content }; } diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/functions/get_dataset_info/get_relevant_field_names.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/functions/get_dataset_info/get_relevant_field_names.ts index 8acbc59903dcf..40c2d7e5c19aa 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/functions/get_dataset_info/get_relevant_field_names.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/functions/get_dataset_info/get_relevant_field_names.ts @@ -13,7 +13,7 @@ import { MessageRole, ShortIdTable, type Message } from '../../../common'; import { concatenateChatCompletionChunks } from '../../../common/utils/concatenate_chat_completion_chunks'; import { FunctionCallChatFunction } from '../../service/types'; -const SELECT_RELEVANT_FIELDS_NAME = 'select_relevant_fields'; +export const SELECT_RELEVANT_FIELDS_NAME = 'select_relevant_fields'; export const GET_RELEVANT_FIELD_NAMES_SYSTEM_MESSAGE = `You are a helpful assistant for Elastic Observability. Your task is to determine which fields are relevant to the conversation by selecting only the field IDs from the provided list. The list in the user message consists of JSON objects that map a human-readable field "name" to its unique "id". 
diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/functions/index.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/functions/index.ts index 590b65749c5c6..8c0fd608cdfa4 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/functions/index.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/functions/index.ts @@ -6,6 +6,7 @@ */ import dedent from 'dedent'; +import { KnowledgeBaseState } from '../../common'; import { CONTEXT_FUNCTION_NAME, registerContextFunction } from './context'; import { registerSummarizationFunction, SUMMARIZE_FUNCTION_NAME } from './summarize'; import type { RegistrationCallback } from '../service/types'; @@ -81,7 +82,8 @@ ${ If the user asks how to change the language, reply in the same language the user asked in.`); } - const { ready: isKnowledgeBaseReady } = await client.getKnowledgeBaseStatus(); + const { kbState } = await client.getKnowledgeBaseStatus(); + const isKnowledgeBaseReady = kbState === KnowledgeBaseState.READY; functions.registerInstruction(({ availableFunctionNames }) => { const instructions: string[] = []; diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/plugin.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/plugin.ts index 8e96245d2a097..ab8bf1738a729 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/plugin.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/plugin.ts @@ -31,9 +31,7 @@ import { registerFunctions } from './functions'; import { recallRankingEvent } from './analytics/recall_ranking'; import { initLangtrace } from './service/client/instrumentation/init_langtrace'; import { aiAssistantCapabilities } from '../common/capabilities'; -import { populateMissingSemanticTextFieldMigration } from './service/startup_migrations/populate_missing_semantic_text_field_migration'; -import { updateExistingIndexAssets } from 
'./service/startup_migrations/create_or_update_index_assets'; - +import { runStartupMigrations } from './service/startup_migrations/run_startup_migrations'; export class ObservabilityAIAssistantPlugin implements Plugin< @@ -128,19 +126,12 @@ export class ObservabilityAIAssistantPlugin })); // Update existing index assets (mappings, templates, etc). This will not create assets if they do not exist. - updateExistingIndexAssets({ logger: this.logger, core }) - .then(() => - populateMissingSemanticTextFieldMigration({ - core, - logger: this.logger, - config: this.config, - }) - ) - .catch((e) => - this.logger.error( - `Error during knowledge base migration in AI Assistant plugin startup: ${e.message}` - ) - ); + + runStartupMigrations({ + core, + logger: this.logger, + config: this.config, + }).catch((e) => this.logger.error(`Error while running startup migrations: ${e.message}`)); service.register(registerFunctions); diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/chat/route.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/chat/route.ts index bb5a5095e30a5..3b06f11854c03 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/chat/route.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/chat/route.ts @@ -256,6 +256,8 @@ async function chatComplete( }, } = params; + resources.logger.debug(`Initializing chat request with ${messages.length} messages`); + const { client, isCloudEnabled, signal, simulateFunctionCalling } = await initializeChatRequest( resources ); diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/conversations/route.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/conversations/route.ts index ef2fc5b2a9265..d6dc514a9e24b 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/conversations/route.ts +++ 
b/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/conversations/route.ts @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -import { notImplemented } from '@hapi/boom'; + import * as t from 'io-ts'; import { Conversation, MessageRole } from '../../../common/types'; import { createObservabilityAIAssistantServerRoute } from '../create_observability_ai_assistant_server_route'; @@ -40,10 +40,6 @@ const getConversationRoute = createObservabilityAIAssistantServerRoute({ const client = await service.getClient({ request }); - if (!client) { - throw notImplemented(); - } - const conversation = await client.get(params.path.conversationId); // conversation without system messages return getConversationWithoutSystemMessages(conversation); @@ -67,10 +63,6 @@ const findConversationsRoute = createObservabilityAIAssistantServerRoute({ const client = await service.getClient({ request }); - if (!client) { - throw notImplemented(); - } - const conversations = await client.find({ query: params?.body?.query }); return { @@ -96,11 +88,6 @@ const createConversationRoute = createObservabilityAIAssistantServerRoute({ const { service, request, params } = resources; const client = await service.getClient({ request }); - - if (!client) { - throw notImplemented(); - } - return client.create(params.body.conversation); }, }); @@ -121,11 +108,6 @@ const duplicateConversationRoute = createObservabilityAIAssistantServerRoute({ const { service, request, params } = resources; const client = await service.getClient({ request }); - - if (!client) { - throw notImplemented(); - } - return client.duplicateConversation(params.path.conversationId); }, }); @@ -149,11 +131,6 @@ const updateConversationRoute = createObservabilityAIAssistantServerRoute({ const { service, request, params } = resources; const client = await service.getClient({ request }); - - if (!client) { - throw notImplemented(); - } - return 
client.update(params.path.conversationId, params.body.conversation); }, }); @@ -174,11 +151,6 @@ const deleteConversationRoute = createObservabilityAIAssistantServerRoute({ const { service, request, params } = resources; const client = await service.getClient({ request }); - - if (!client) { - throw notImplemented(); - } - return client.delete(params.path.conversationId); }, }); @@ -204,10 +176,6 @@ const patchConversationRoute = createObservabilityAIAssistantServerRoute({ const client = await service.getClient({ request }); - if (!client) { - throw notImplemented(); - } - return client.updatePartial({ conversationId: params.path.conversationId, updates: params.body, diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/functions/route.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/functions/route.ts index e8cb7f1973726..3f75445bfa220 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/functions/route.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/functions/route.ts @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -import { notImplemented } from '@hapi/boom'; + import { nonEmptyStringRt, toBooleanRt } from '@kbn/io-ts-utils'; import { context as otelContext } from '@opentelemetry/api'; import * as t from 'io-ts'; @@ -159,10 +159,6 @@ const functionRecallRoute = createObservabilityAIAssistantServerRoute({ body: { queries, categories }, } = resources.params; - if (!client) { - throw notImplemented(); - } - const entries = await client.recall({ queries, categories }); return { entries }; }, @@ -188,10 +184,6 @@ const functionSummariseRoute = createObservabilityAIAssistantServerRoute({ handler: async (resources): Promise => { const client = await resources.service.getClient({ request: resources.request }); - if (!client) { - throw notImplemented(); - } - const { title, confidence, diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/knowledge_base/route.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/knowledge_base/route.ts index 9957a68178ab3..a04b088c55775 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/knowledge_base/route.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/knowledge_base/route.ts @@ -6,18 +6,21 @@ */ import pLimit from 'p-limit'; -import { notImplemented } from '@hapi/boom'; import { nonEmptyStringRt, toBooleanRt } from '@kbn/io-ts-utils'; import * as t from 'io-ts'; import { InferenceInferenceEndpointInfo, - MlDeploymentAllocationState, - MlDeploymentAssignmentState, - MlTrainedModelDeploymentAllocationStatus, + MlTrainedModelStats, } from '@elastic/elasticsearch/lib/api/types'; -import moment from 'moment'; +import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils'; +import pRetry from 'p-retry'; import { createObservabilityAIAssistantServerRoute } from '../create_observability_ai_assistant_server_route'; -import { Instruction, KnowledgeBaseEntry, KnowledgeBaseEntryRole } from '../../../common/types'; +import { + 
Instruction, + KnowledgeBaseEntry, + KnowledgeBaseEntryRole, + KnowledgeBaseState, +} from '../../../common/types'; const getKnowledgeBaseStatus = createObservabilityAIAssistantServerRoute({ endpoint: 'GET /internal/observability_ai_assistant/kb/status', @@ -31,77 +34,70 @@ const getKnowledgeBaseStatus = createObservabilityAIAssistantServerRoute({ request, }): Promise<{ errorMessage?: string; - ready: boolean; enabled: boolean; endpoint?: Partial; - model_stats?: { - deployment_state?: MlDeploymentAssignmentState; - allocation_state?: MlDeploymentAllocationState; - allocation_count?: MlTrainedModelDeploymentAllocationStatus['allocation_count']; - }; + modelStats?: Partial; + kbState: KnowledgeBaseState; + currentInferenceId: string | undefined; + concreteWriteIndex: string | undefined; + isReIndexing: boolean; }> => { const client = await service.getClient({ request }); - - if (!client) { - throw notImplemented(); - } - return client.getKnowledgeBaseStatus(); }, }); const setupKnowledgeBase = createObservabilityAIAssistantServerRoute({ endpoint: 'POST /internal/observability_ai_assistant/kb/setup', - params: t.partial({ - query: t.partial({ - model_id: t.string, + params: t.type({ + query: t.type({ + inference_id: t.string, }), }), - options: { - timeout: { - idleSocket: moment.duration(20, 'minutes').asMilliseconds(), - }, - }, security: { authz: { requiredPrivileges: ['ai_assistant'], }, }, - handler: async (resources): Promise => { + handler: async ( + resources + ): Promise<{ + reindex: boolean; + currentInferenceId: string | undefined; + nextInferenceId: string; + }> => { const client = await resources.service.getClient({ request: resources.request }); - - if (!client) { - throw notImplemented(); - } - - const { model_id: modelId } = resources.params?.query ?? 
{}; - - return await client.setupKnowledgeBase(modelId); + const { inference_id: inferenceId } = resources.params.query; + return client.setupKnowledgeBase(inferenceId); }, }); -const resetKnowledgeBase = createObservabilityAIAssistantServerRoute({ - endpoint: 'POST /internal/observability_ai_assistant/kb/reset', +const warmupModelKnowledgeBase = createObservabilityAIAssistantServerRoute({ + endpoint: 'POST /internal/observability_ai_assistant/kb/warmup_model', + params: t.type({ + query: t.type({ + inference_id: t.string, + }), + }), security: { authz: { requiredPrivileges: ['ai_assistant'], }, }, - handler: async (resources): Promise<{ result: string }> => { + handler: async (resources): Promise => { const client = await resources.service.getClient({ request: resources.request }); - - if (!client) { - throw notImplemented(); - } - - await client.resetKnowledgeBase(); - - return { result: 'success' }; + const { inference_id: inferenceId } = resources.params.query; + return client.warmupKbModel(inferenceId); }, }); const reIndexKnowledgeBase = createObservabilityAIAssistantServerRoute({ endpoint: 'POST /internal/observability_ai_assistant/kb/reindex', + params: t.type({ + query: t.type({ + inference_id: t.string, + }), + }), security: { authz: { requiredPrivileges: ['ai_assistant'], @@ -109,14 +105,14 @@ const reIndexKnowledgeBase = createObservabilityAIAssistantServerRoute({ }, handler: async (resources): Promise<{ result: boolean }> => { const client = await resources.service.getClient({ request: resources.request }); - const result = await client.reIndexKnowledgeBaseWithLock(); + const { inference_id: inferenceId } = resources.params.query; + const result = await client.reIndexKnowledgeBaseWithLock(inferenceId); return { result }; }, }); -const semanticTextMigrationKnowledgeBase = createObservabilityAIAssistantServerRoute({ - endpoint: - 'POST /internal/observability_ai_assistant/kb/migrations/populate_missing_semantic_text_field', +const 
startupMigrationsKnowledgeBase = createObservabilityAIAssistantServerRoute({ + endpoint: 'POST /internal/observability_ai_assistant/kb/migrations/startup', security: { authz: { requiredPrivileges: ['ai_assistant'], @@ -124,12 +120,27 @@ const semanticTextMigrationKnowledgeBase = createObservabilityAIAssistantServerR }, handler: async (resources): Promise => { const client = await resources.service.getClient({ request: resources.request }); + return client.runStartupMigrations(); + }, +}); - if (!client) { - throw notImplemented(); - } +const getKnowledgeBaseInferenceEndpoints = createObservabilityAIAssistantServerRoute({ + endpoint: 'GET /internal/observability_ai_assistant/kb/inference_endpoints', + security: { + authz: { + requiredPrivileges: ['ai_assistant'], + }, + }, + handler: async ( + resources + ): Promise<{ + endpoints: InferenceAPIConfigResponse[]; + }> => { + const client = await resources.service.getClient({ request: resources.request }); - return client.reIndexKnowledgeBaseAndPopulateSemanticTextField(); + return { + endpoints: await client.getInferenceEndpointsForEmbedding(), + }; }, }); @@ -147,10 +158,6 @@ const getKnowledgeBaseUserInstructions = createObservabilityAIAssistantServerRou }> => { const client = await resources.service.getClient({ request: resources.request }); - if (!client) { - throw notImplemented(); - } - return { userInstructions: await client.getKnowledgeBaseUserInstructions(), }; @@ -174,10 +181,6 @@ const saveKnowledgeBaseUserInstruction = createObservabilityAIAssistantServerRou handler: async (resources): Promise => { const client = await resources.service.getClient({ request: resources.request }); - if (!client) { - throw notImplemented(); - } - const { id, text, public: isPublic } = resources.params.body; return client.addUserInstruction({ entry: { id, text, public: isPublic }, @@ -205,14 +208,9 @@ const getKnowledgeBaseEntries = createObservabilityAIAssistantServerRoute({ entries: KnowledgeBaseEntry[]; }> => { const client 
= await resources.service.getClient({ request: resources.request }); - - if (!client) { - throw notImplemented(); - } - const { query, sortBy, sortDirection } = resources.params.query; - return await client.getKnowledgeBaseEntries({ query, sortBy, sortDirection }); + return client.getKnowledgeBaseEntries({ query, sortBy, sortDirection }); }, }); @@ -248,10 +246,6 @@ const saveKnowledgeBaseEntry = createObservabilityAIAssistantServerRoute({ handler: async (resources): Promise => { const client = await resources.service.getClient({ request: resources.request }); - if (!client) { - throw notImplemented(); - } - const entry = resources.params.body; return client.addKnowledgeBaseEntry({ entry: { @@ -280,11 +274,6 @@ const deleteKnowledgeBaseEntry = createObservabilityAIAssistantServerRoute({ }, handler: async (resources): Promise => { const client = await resources.service.getClient({ request: resources.request }); - - if (!client) { - throw notImplemented(); - } - return client.deleteKnowledgeBaseEntry(resources.params.path.entryId); }, }); @@ -304,41 +293,46 @@ const importKnowledgeBaseEntries = createObservabilityAIAssistantServerRoute({ handler: async (resources): Promise => { const client = await resources.service.getClient({ request: resources.request }); - if (!client) { - throw notImplemented(); - } + const { kbState } = await client.getKnowledgeBaseStatus(); - const { ready } = await client.getKnowledgeBaseStatus(); - if (!ready) { + if (kbState !== KnowledgeBaseState.READY) { throw new Error('Knowledge base is not ready'); } const limiter = pLimit(5); - const promises = resources.params.body.entries.map(async (entry) => { return limiter(async () => { - return client.addKnowledgeBaseEntry({ - entry: { - confidence: 'high', - is_correction: false, - public: true, - labels: {}, - role: KnowledgeBaseEntryRole.UserEntry, - ...entry, + return pRetry( + () => { + return client.addKnowledgeBaseEntry({ + entry: { + confidence: 'high', + is_correction: false, + public: 
true, + labels: {}, + role: KnowledgeBaseEntryRole.UserEntry, + ...entry, + }, + }); }, - }); + { retries: 10 } + ); }); }); await Promise.all(promises); + + resources.logger.info( + `Imported ${resources.params.body.entries.length} knowledge base entries` + ); }, }); export const knowledgeBaseRoutes = { ...reIndexKnowledgeBase, - ...semanticTextMigrationKnowledgeBase, + ...startupMigrationsKnowledgeBase, ...setupKnowledgeBase, - ...resetKnowledgeBase, + ...reIndexKnowledgeBase, ...getKnowledgeBaseStatus, ...getKnowledgeBaseEntries, ...saveKnowledgeBaseUserInstruction, @@ -346,4 +340,6 @@ export const knowledgeBaseRoutes = { ...getKnowledgeBaseUserInstructions, ...saveKnowledgeBaseEntry, ...deleteKnowledgeBaseEntry, + ...getKnowledgeBaseInferenceEndpoints, + ...warmupModelKnowledgeBase, }; diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/top_level/route.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/top_level/route.ts index b56b2e1f07bd2..7d0e387cf4048 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/top_level/route.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/routes/top_level/route.ts @@ -5,20 +5,35 @@ * 2.0. 
*/ -import { createOrUpdateIndexAssets } from '../../service/startup_migrations/create_or_update_index_assets'; +import * as t from 'io-ts'; import { createObservabilityAIAssistantServerRoute } from '../create_observability_ai_assistant_server_route'; +import { createOrUpdateConversationIndexAssets } from '../../service/index_assets/create_or_update_conversation_index_assets'; +import { createOrUpdateKnowledgeBaseIndexAssets } from '../../service/index_assets/create_or_update_knowledge_base_index_assets'; const createOrUpdateIndexAssetsRoute = createObservabilityAIAssistantServerRoute({ endpoint: 'POST /internal/observability_ai_assistant/index_assets', + params: t.type({ + query: t.type({ + inference_id: t.string, + }), + }), security: { authz: { requiredPrivileges: ['ai_assistant'], }, }, handler: async (resources): Promise => { - return createOrUpdateIndexAssets({ + const { inference_id: inferenceId } = resources.params.query; + + await createOrUpdateConversationIndexAssets({ + logger: resources.logger, + core: resources.plugins.core.setup, + }); + + return createOrUpdateKnowledgeBaseIndexAssets({ logger: resources.logger, core: resources.plugins.core.setup, + inferenceId, }); }, }); diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/client/index.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/client/index.ts index 10447b8beb0f4..21722976c7dd9 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/client/index.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/client/index.ts @@ -32,6 +32,7 @@ import type { AssistantScope } from '@kbn/ai-assistant-common'; import type { InferenceClient } from '@kbn/inference-plugin/server'; import { ChatCompleteResponse, FunctionCallingMode, ToolChoiceType } from '@kbn/inference-common'; +import { LockAcquisitionError } from '@kbn/lock-manager'; import { resourceNames } from '..'; import { 
ChatCompletionChunkEvent, @@ -67,12 +68,15 @@ import { continueConversation } from './operators/continue_conversation'; import { convertInferenceEventsToStreamingEvents } from './operators/convert_inference_events_to_streaming_events'; import { extractMessages } from './operators/extract_messages'; import { getGeneratedTitle } from './operators/get_generated_title'; -import { populateMissingSemanticTextFieldMigration } from '../startup_migrations/populate_missing_semantic_text_field_migration'; +import { runStartupMigrations } from '../startup_migrations/run_startup_migrations'; import { ObservabilityAIAssistantPluginStartDependencies } from '../../types'; import { ObservabilityAIAssistantConfig } from '../../config'; -import { getElserModelId } from '../knowledge_base_service/get_elser_model_id'; import { apmInstrumentation } from './operators/apm_instrumentation'; +import { waitForKbModel, warmupModel } from '../inference_endpoint'; import { reIndexKnowledgeBaseWithLock } from '../knowledge_base_service/reindex_knowledge_base'; +import { populateMissingSemanticTextFieldWithLock } from '../startup_migrations/populate_missing_semantic_text_fields'; +import { createOrUpdateKnowledgeBaseIndexAssets } from '../index_assets/create_or_update_knowledge_base_index_assets'; +import { getInferenceIdFromWriteIndex } from '../knowledge_base_service/get_inference_id_from_write_index'; const MAX_FUNCTION_CALLS = 8; @@ -103,7 +107,7 @@ export class ObservabilityAIAssistantClient { conversationId: string ): Promise | undefined> => { const response = await this.dependencies.esClient.asInternalUser.search({ - index: resourceNames.aliases.conversations, + index: resourceNames.writeIndexAlias.conversations, query: { bool: { filter: [ @@ -535,7 +539,7 @@ export class ObservabilityAIAssistantClient { find = async (options?: { query?: string }): Promise => { const response = await this.dependencies.esClient.asInternalUser.search({ - index: resourceNames.aliases.conversations, + index: 
resourceNames.writeIndexAlias.conversations, allow_no_indices: true, query: { bool: { @@ -600,7 +604,7 @@ export class ObservabilityAIAssistantClient { ); await this.dependencies.esClient.asInternalUser.index({ - index: resourceNames.aliases.conversations, + index: resourceNames.writeIndexAlias.conversations, document: createdConversation, refresh: true, }); @@ -668,53 +672,111 @@ export class ObservabilityAIAssistantClient { ); }; + getInferenceEndpointsForEmbedding = () => { + return this.dependencies.knowledgeBaseService.getInferenceEndpointsForEmbedding(); + }; + getKnowledgeBaseStatus = () => { - return this.dependencies.knowledgeBaseService.getStatus(); + return this.dependencies.knowledgeBaseService.getModelStatus(); }; - setupKnowledgeBase = async (modelId: string | undefined) => { - const { esClient, core, logger, knowledgeBaseService } = this.dependencies; + setupKnowledgeBase = async ( + nextInferenceId: string + ): Promise<{ + reindex: boolean; + currentInferenceId: string | undefined; + nextInferenceId: string; + }> => { + const { esClient, core, logger } = this.dependencies; + + logger.debug(`Setting up knowledge base with inference_id: ${nextInferenceId}`); + + const currentInferenceId = await getInferenceIdFromWriteIndex(esClient).catch(() => { + logger.debug( + `Current KB write index does not have an inference_id. This is to be expected for indices created before 8.16` + ); + return undefined; + }); - if (!modelId) { - modelId = await getElserModelId({ core, logger }); + if (currentInferenceId === nextInferenceId) { + logger.debug('Inference ID is unchanged. 
No need to re-index knowledge base.'); + warmupModel({ esClient, logger, inferenceId: nextInferenceId }).catch(() => {}); + return { reindex: false, currentInferenceId, nextInferenceId }; } - // setup the knowledge base - const res = await knowledgeBaseService.setup(esClient, modelId); + await createOrUpdateKnowledgeBaseIndexAssets({ + core: this.dependencies.core, + logger: this.dependencies.logger, + inferenceId: nextInferenceId, + }); - populateMissingSemanticTextFieldMigration({ - core, + waitForKbModel({ + core: this.dependencies.core, + esClient, logger, config: this.dependencies.config, - }).catch((e) => { - this.dependencies.logger.error( - `Failed to populate missing semantic text fields: ${e.message}` - ); - }); + inferenceId: nextInferenceId, + }) + .then(async () => { + logger.info( + `Inference ID has changed from "${currentInferenceId}" to "${nextInferenceId}". Re-indexing knowledge base.` + ); + + await reIndexKnowledgeBaseWithLock({ + core, + logger, + esClient, + inferenceId: nextInferenceId, + }); + await populateMissingSemanticTextFieldWithLock({ + core, + logger, + config: this.dependencies.config, + esClient: this.dependencies.esClient, + }); + }) + .catch((e) => { + const isLockAcquisitionError = e instanceof LockAcquisitionError; + if (isLockAcquisitionError) { + logger.info(e.message); + } else { + logger.error( + `Failed to setup knowledge base with inference_id: ${nextInferenceId}. 
Error: ${e.message}` + ); + logger.debug(e); + } + }); - return res; + return { reindex: true, currentInferenceId, nextInferenceId }; }; - resetKnowledgeBase = () => { - const { esClient } = this.dependencies; - return this.dependencies.knowledgeBaseService.reset(esClient); + warmupKbModel = (inferenceId: string) => { + return waitForKbModel({ + core: this.dependencies.core, + esClient: this.dependencies.esClient, + logger: this.dependencies.logger, + config: this.dependencies.config, + inferenceId, + }); }; - reIndexKnowledgeBaseWithLock = () => { + reIndexKnowledgeBaseWithLock = (inferenceId: string) => { return reIndexKnowledgeBaseWithLock({ core: this.dependencies.core, esClient: this.dependencies.esClient, logger: this.dependencies.logger, + inferenceId, }); }; - reIndexKnowledgeBaseAndPopulateSemanticTextField = () => { - return populateMissingSemanticTextFieldMigration({ + runStartupMigrations = () => { + return runStartupMigrations({ core: this.dependencies.core, logger: this.dependencies.logger, config: this.dependencies.config, }); }; + addUserInstruction = async ({ entry, }: { diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index.ts index 62bf0ffb6c4e2..72c21eb05b343 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index.ts @@ -17,9 +17,9 @@ import { ObservabilityAIAssistantClient } from './client'; import { KnowledgeBaseService } from './knowledge_base_service'; import type { RegistrationCallback, RespondFunctionResources } from './types'; import { ObservabilityAIAssistantConfig } from '../config'; -import { createOrUpdateIndexAssets } from './startup_migrations/create_or_update_index_assets'; +import { createOrUpdateConversationIndexAssets } from './index_assets/create_or_update_conversation_index_assets'; 
-function getResourceName(resource: string) { +export function getResourceName(resource: string) { return `.kibana-observability-ai-assistant-${resource}`; } @@ -28,7 +28,7 @@ export const resourceNames = { conversations: getResourceName('component-template-conversations'), kb: getResourceName('component-template-kb'), }, - aliases: { + writeIndexAlias: { conversations: getResourceName('conversations'), kb: getResourceName('kb'), }, @@ -40,15 +40,15 @@ export const resourceNames = { conversations: getResourceName('index-template-conversations'), kb: getResourceName('index-template-kb'), }, - concreteIndexName: { + concreteWriteIndexName: { conversations: getResourceName('conversations-000001'), kb: getResourceName('kb-000001'), }, }; -const createIndexAssetsOnce = once( +const createConversationIndexAssetsOnce = once( (logger: Logger, core: CoreSetup) => - pRetry(() => createOrUpdateIndexAssets({ logger, core })) + pRetry(() => createOrUpdateConversationIndexAssets({ logger, core })) ); export class ObservabilityAIAssistantService { @@ -86,7 +86,7 @@ export class ObservabilityAIAssistantService { const [[coreStart, plugins]] = await Promise.all([ this.core.getStartServices(), - createIndexAssetsOnce(this.logger, this.core), + createConversationIndexAssetsOnce(this.logger, this.core), ]); // user will not be found when executed from system connector context diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/create_or_update_conversation_index_assets.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/create_or_update_conversation_index_assets.ts new file mode 100644 index 0000000000000..38f90865d4d91 --- /dev/null +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/create_or_update_conversation_index_assets.ts @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { createConcreteWriteIndex, getDataStreamAdapter } from '@kbn/alerting-plugin/server'; +import type { CoreSetup, Logger } from '@kbn/core/server'; +import type { ObservabilityAIAssistantPluginStartDependencies } from '../../types'; +import { conversationComponentTemplate } from './templates/conversation_component_template'; +import { resourceNames } from '..'; + +export async function createOrUpdateConversationIndexAssets({ + logger, + core, +}: { + logger: Logger; + core: CoreSetup; +}) { + try { + logger.debug('Setting up index assets'); + const [coreStart] = await core.getStartServices(); + const { asInternalUser } = coreStart.elasticsearch.client; + + // Conversations: component template + await asInternalUser.cluster.putComponentTemplate({ + create: false, + name: resourceNames.componentTemplate.conversations, + template: conversationComponentTemplate, + }); + + // Conversations: index template + await asInternalUser.indices.putIndexTemplate({ + name: resourceNames.indexTemplate.conversations, + composed_of: [resourceNames.componentTemplate.conversations], + create: false, + index_patterns: [resourceNames.indexPatterns.conversations], + template: { + settings: { + number_of_shards: 1, + auto_expand_replicas: '0-1', + hidden: true, + }, + }, + }); + + // Conversations: write index + const conversationAliasName = resourceNames.writeIndexAlias.conversations; + await createConcreteWriteIndex({ + esClient: asInternalUser, + logger, + totalFieldsLimit: 10000, + indexPatterns: { + alias: conversationAliasName, + pattern: `${conversationAliasName}*`, + basePattern: `${conversationAliasName}*`, + name: resourceNames.concreteWriteIndexName.conversations, + template: resourceNames.indexTemplate.conversations, + }, + dataStreamAdapter: getDataStreamAdapter({ useDataStreamForAlerts: 
false }), + }); + + logger.info('Successfully set up conversation index assets'); + } catch (error) { + logger.error(`Failed setting up conversation index assets: ${error.message}`); + logger.debug(error); + } +} diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/create_or_update_knowledge_base_index_assets.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/create_or_update_knowledge_base_index_assets.ts new file mode 100644 index 0000000000000..7358903685c19 --- /dev/null +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/create_or_update_knowledge_base_index_assets.ts @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import type { CoreSetup, Logger } from '@kbn/core/server'; +import { createConcreteWriteIndex, getDataStreamAdapter } from '@kbn/alerting-plugin/server'; +import type { ObservabilityAIAssistantPluginStartDependencies } from '../../types'; +import { getComponentTemplate } from './templates/kb_component_template'; +import { resourceNames } from '..'; + +export async function createOrUpdateKnowledgeBaseIndexAssets({ + logger, + core, + inferenceId, +}: { + logger: Logger; + core: CoreSetup; + inferenceId: string; +}) { + try { + logger.debug('Setting up knowledge base index assets'); + const [coreStart] = await core.getStartServices(); + const { asInternalUser } = coreStart.elasticsearch.client; + + // Knowledge base: component template + await asInternalUser.cluster.putComponentTemplate({ + create: false, + name: resourceNames.componentTemplate.kb, + template: getComponentTemplate(inferenceId), + }); + + // Knowledge base: index template + await asInternalUser.indices.putIndexTemplate({ + name: 
resourceNames.indexTemplate.kb, + composed_of: [resourceNames.componentTemplate.kb], + create: false, + index_patterns: [resourceNames.indexPatterns.kb], + template: { + settings: { + number_of_shards: 1, + auto_expand_replicas: '0-1', + hidden: true, + }, + }, + }); + + // Knowledge base: write index + const kbAliasName = resourceNames.writeIndexAlias.kb; + await createConcreteWriteIndex({ + esClient: asInternalUser, + logger, + totalFieldsLimit: 10000, + indexPatterns: { + alias: kbAliasName, + pattern: `${kbAliasName}*`, + basePattern: `${kbAliasName}*`, + name: resourceNames.concreteWriteIndexName.kb, + template: resourceNames.indexTemplate.kb, + }, + dataStreamAdapter: getDataStreamAdapter({ useDataStreamForAlerts: false }), + }); + + logger.info('Successfully set up knowledge base index assets'); + } catch (error) { + logger.error(`Failed setting up knowledge base index assets: ${error.message}`); + logger.debug(error); + } +} diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/conversation_component_template.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/templates/conversation_component_template.ts similarity index 100% rename from x-pack/platform/plugins/shared/observability_ai_assistant/server/service/conversation_component_template.ts rename to x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/templates/conversation_component_template.ts diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/templates/kb_component_template.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/templates/kb_component_template.ts new file mode 100644 index 0000000000000..104bda3840c6d --- /dev/null +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/templates/kb_component_template.ts @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { ClusterComponentTemplate } from '@elastic/elasticsearch/lib/api/types'; + +const keyword = { + type: 'keyword' as const, + ignore_above: 1024, +}; + +const text = { + type: 'text' as const, +}; + +const date = { + type: 'date' as const, +}; + +const dynamic = { + type: 'object' as const, + dynamic: true, +}; + +export function getComponentTemplate(inferenceId: string) { + const kbComponentTemplate: ClusterComponentTemplate['component_template']['template'] = { + mappings: { + dynamic: false, + properties: { + '@timestamp': date, + id: keyword, + doc_id: { type: 'text', fielddata: true }, // deprecated but kept for backwards compatibility + title: { + type: 'text', + fields: { + keyword: { + type: 'keyword', + ignore_above: 256, + }, + }, + }, + user: { + properties: { + id: keyword, + name: keyword, + }, + }, + type: keyword, + labels: dynamic, + conversation: { + properties: { + id: keyword, + title: text, + last_updated: date, + }, + }, + namespace: keyword, + text, + semantic_text: { + type: 'semantic_text', + inference_id: inferenceId, + }, + 'ml.tokens': { + type: 'rank_features', + }, + confidence: keyword, + is_correction: { + type: 'boolean', + }, + public: { + type: 'boolean', + }, + }, + }, + }; + + return kbComponentTemplate; +} diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/update_existing_index_assets.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/update_existing_index_assets.ts new file mode 100644 index 0000000000000..0212a72221d4e --- /dev/null +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/index_assets/update_existing_index_assets.ts @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import type { CoreSetup, Logger } from '@kbn/core/server'; +import type { ObservabilityAIAssistantPluginStartDependencies } from '../../types'; +import { createOrUpdateConversationIndexAssets } from './create_or_update_conversation_index_assets'; +import { createOrUpdateKnowledgeBaseIndexAssets } from './create_or_update_knowledge_base_index_assets'; +import { hasKbWriteIndex } from '../knowledge_base_service/has_kb_index'; +import { getInferenceIdFromWriteIndex } from '../knowledge_base_service/get_inference_id_from_write_index'; +import { resourceNames } from '..'; + +export const DEFAULT_INFERENCE_ENDPOINT = '.elser-2-elasticsearch'; + +export async function updateExistingIndexAssets({ + logger, + core, +}: { + logger: Logger; + core: CoreSetup; +}) { + const [coreStart] = await core.getStartServices(); + const esClient = coreStart.elasticsearch.client; + + const doesKbIndexExist = await hasKbWriteIndex({ esClient }); + + const doesConversationIndexExist = await esClient.asInternalUser.indices.exists({ + index: resourceNames.writeIndexAlias.conversations, + }); + + if (!doesKbIndexExist && !doesConversationIndexExist) { + logger.warn('Index assets do not exist. Aborting updating index assets'); + return; + } + + if (doesConversationIndexExist) { + logger.debug('Found index for conversations. Updating index assets.'); + await createOrUpdateConversationIndexAssets({ logger, core }); + } + + if (doesKbIndexExist) { + logger.debug('Found index for knowledge base. Updating index assets.'); + + const currentInferenceId = await getInferenceIdFromWriteIndex(esClient).catch(() => { + logger.debug( + `Current KB write index does not have an inference_id. 
This is to be expected for indices created before 8.16` + ); + return DEFAULT_INFERENCE_ENDPOINT; + }); + + await createOrUpdateKnowledgeBaseIndexAssets({ logger, core, inferenceId: currentInferenceId }); + } +} diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/inference_endpoint.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/inference_endpoint.ts index 4915570346b34..8b956fa096179 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/inference_endpoint.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/inference_endpoint.ts @@ -8,84 +8,71 @@ import { errors } from '@elastic/elasticsearch'; import { ElasticsearchClient } from '@kbn/core-elasticsearch-server'; import { Logger } from '@kbn/logging'; -import moment from 'moment'; -import pRetry from 'p-retry'; import { InferenceInferenceEndpointInfo, MlGetTrainedModelsStatsResponse, + MlTrainedModelStats, } from '@elastic/elasticsearch/lib/api/types'; +import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils'; +import pRetry from 'p-retry'; +import { CoreSetup } from '@kbn/core/server'; +import { KnowledgeBaseState } from '../../common'; import { ObservabilityAIAssistantConfig } from '../config'; +import { + getConcreteWriteIndex, + getInferenceIdFromWriteIndex, +} from './knowledge_base_service/get_inference_id_from_write_index'; +import { isReIndexInProgress } from './knowledge_base_service/reindex_knowledge_base'; +import { ObservabilityAIAssistantPluginStartDependencies } from '../types'; -export const AI_ASSISTANT_KB_INFERENCE_ID = 'obs_ai_assistant_kb_inference'; +const SUPPORTED_TASK_TYPES = ['sparse_embedding', 'text_embedding']; -export async function createInferenceEndpoint({ +export const getInferenceEndpointsForEmbedding = async ({ esClient, logger, - modelId, }: { - esClient: { - asCurrentUser: ElasticsearchClient; - }; + esClient: { asInternalUser: 
ElasticsearchClient }; logger: Logger; - modelId: string; -}) { - try { - logger.debug(`Creating inference endpoint "${AI_ASSISTANT_KB_INFERENCE_ID}"`); - - return await esClient.asCurrentUser.inference.put( - { - inference_id: AI_ASSISTANT_KB_INFERENCE_ID, - task_type: 'sparse_embedding', - inference_config: { - service: 'elasticsearch', - service_settings: { - model_id: modelId, - adaptive_allocations: { enabled: true, min_number_of_allocations: 1 }, - num_threads: 1, - }, - task_settings: {}, - }, - }, - { - requestTimeout: moment.duration(2, 'minutes').asMilliseconds(), - } - ); - } catch (e) { - logger.debug( - `Failed to create inference endpoint "${AI_ASSISTANT_KB_INFERENCE_ID}": ${e.message}` - ); - throw e; +}): Promise<{ + inferenceEndpoints: InferenceAPIConfigResponse[]; +}> => { + const { endpoints } = await esClient.asInternalUser.inference.get({ + inference_id: '_all', + }); + + if (!endpoints.length) { + throw new Error('Did not find any inference endpoints'); } -} -export async function deleteInferenceEndpoint({ - esClient, -}: { - esClient: { - asCurrentUser: ElasticsearchClient; - }; -}) { - const response = await esClient.asCurrentUser.inference.delete({ - inference_id: AI_ASSISTANT_KB_INFERENCE_ID, - force: true, - }); + const embeddingEndpoints = endpoints.filter((endpoint) => + SUPPORTED_TASK_TYPES.includes(endpoint.task_type) + ); - return response; -} + if (!embeddingEndpoints.length) { + throw new Error('Did not find any inference endpoints for embedding'); + } + + logger.debug(`Found ${embeddingEndpoints.length} inference endpoints for supported task types`); + + return { + inferenceEndpoints: embeddingEndpoints as InferenceAPIConfigResponse[], + }; +}; -export async function getInferenceEndpoint({ +async function getInferenceEndpoint({ esClient, + inferenceId, }: { esClient: { asInternalUser: ElasticsearchClient }; + inferenceId: string; }) { const response = await esClient.asInternalUser.inference.get({ - inference_id: 
AI_ASSISTANT_KB_INFERENCE_ID, + inference_id: inferenceId, }); if (response.endpoints.length === 0) { throw new Error('Inference endpoint not found'); } - return response.endpoints[0]; } @@ -98,70 +85,160 @@ export function isInferenceEndpointMissingOrUnavailable(error: Error) { } export async function getKbModelStatus({ + core, esClient, logger, config, + inferenceId, }: { + core: CoreSetup; esClient: { asInternalUser: ElasticsearchClient }; logger: Logger; config: ObservabilityAIAssistantConfig; -}) { + inferenceId?: string; +}): Promise<{ + enabled: boolean; + endpoint?: InferenceInferenceEndpointInfo; + modelStats?: MlTrainedModelStats; + errorMessage?: string; + kbState: KnowledgeBaseState; + currentInferenceId: string | undefined; + concreteWriteIndex: string | undefined; + isReIndexing: boolean; +}> { const enabled = config.enableKnowledgeBase; + const concreteWriteIndex = await getConcreteWriteIndex(esClient); + const isReIndexing = await isReIndexInProgress({ esClient, logger, core }); + + const currentInferenceId = await getInferenceIdFromWriteIndex(esClient).catch(() => undefined); + if (!inferenceId) { + if (!currentInferenceId) { + logger.error('Inference id not provided and not found in write index'); + return { + enabled, + errorMessage: 'Inference id not found', + kbState: KnowledgeBaseState.NOT_INSTALLED, + currentInferenceId, + concreteWriteIndex, + isReIndexing, + }; + } + + logger.debug(`Using current inference id "${currentInferenceId}" from write index`); + inferenceId = currentInferenceId; + } let endpoint: InferenceInferenceEndpointInfo; try { - endpoint = await getInferenceEndpoint({ esClient }); + endpoint = await getInferenceEndpoint({ esClient, inferenceId }); + logger.debug( + `Inference endpoint "${inferenceId}" found with model id "${endpoint?.service_settings?.model_id}"` + ); } catch (error) { if (!isInferenceEndpointMissingOrUnavailable(error)) { throw error; } - return { ready: false, enabled, errorMessage: error.message }; + 
logger.error(`Inference endpoint "${inferenceId}" not found or unavailable: ${error.message}`); + + return { + enabled, + errorMessage: error.message, + kbState: KnowledgeBaseState.NOT_INSTALLED, + currentInferenceId, + concreteWriteIndex, + isReIndexing, + }; } + const modelId = endpoint?.service_settings?.model_id; let trainedModelStatsResponse: MlGetTrainedModelsStatsResponse; + try { trainedModelStatsResponse = await esClient.asInternalUser.ml.getTrainedModelsStats({ - model_id: endpoint.service_settings?.model_id, + model_id: modelId, }); } catch (error) { - logger.debug(`Failed to get model stats: ${error.message}`); - return { ready: false, enabled, errorMessage: error.message }; + logger.debug( + `Failed to get model stats for model "${modelId}" and inference id ${inferenceId}: ${error.message}` + ); + + return { + enabled, + endpoint, + errorMessage: error.message, + kbState: KnowledgeBaseState.NOT_INSTALLED, + currentInferenceId, + concreteWriteIndex, + isReIndexing, + }; } const modelStats = trainedModelStatsResponse.trained_model_stats.find( - (stats) => stats.deployment_stats?.deployment_id === AI_ASSISTANT_KB_INFERENCE_ID + (stats) => stats.deployment_stats?.deployment_id === inferenceId ); - const deploymentState = modelStats?.deployment_stats?.state; - const allocationState = modelStats?.deployment_stats?.allocation_status?.state; - const allocationCount = modelStats?.deployment_stats?.allocation_status?.allocation_count ?? 
0; - const ready = - deploymentState === 'started' && allocationState === 'fully_allocated' && allocationCount > 0; + + let kbState: KnowledgeBaseState; + + if (trainedModelStatsResponse.trained_model_stats?.length && !modelStats) { + // model has been deployed at least once, but stopped later + kbState = KnowledgeBaseState.MODEL_PENDING_DEPLOYMENT; + } else if (modelStats?.deployment_stats?.state === 'failed') { + kbState = KnowledgeBaseState.ERROR; + } else if ( + modelStats?.deployment_stats?.state === 'starting' && + modelStats?.deployment_stats?.allocation_status?.allocation_count === 0 + ) { + kbState = KnowledgeBaseState.DEPLOYING_MODEL; + } else if ( + modelStats?.deployment_stats?.state === 'started' && + modelStats?.deployment_stats?.allocation_status?.state === 'fully_allocated' && + modelStats?.deployment_stats?.allocation_status?.allocation_count > 0 + ) { + kbState = KnowledgeBaseState.READY; + } else if ( + modelStats?.deployment_stats?.state === 'started' && + modelStats?.deployment_stats?.allocation_status?.state === 'fully_allocated' && + modelStats?.deployment_stats?.allocation_status?.allocation_count === 0 + ) { + // model has been scaled down due to inactivity + kbState = KnowledgeBaseState.MODEL_PENDING_ALLOCATION; + } else { + kbState = KnowledgeBaseState.ERROR; + } return { endpoint, - ready, enabled, - model_stats: { - allocation_count: allocationCount, - deployment_state: deploymentState, - allocation_state: allocationState, - }, + modelStats, + kbState, + currentInferenceId, + concreteWriteIndex, + isReIndexing, }; } export async function waitForKbModel({ + core, esClient, logger, config, + inferenceId, }: { + core: CoreSetup; esClient: { asInternalUser: ElasticsearchClient }; logger: Logger; config: ObservabilityAIAssistantConfig; + inferenceId: string; }) { + // Run a dummy inference to trigger the model to deploy + // This is a workaround for the fact that the model may not be deployed yet + await warmupModel({ esClient, logger, 
inferenceId }).catch(() => {}); + return pRetry( async () => { - const { ready } = await getKbModelStatus({ esClient, logger, config }); - if (!ready) { + const { kbState } = await getKbModelStatus({ core, esClient, logger, config, inferenceId }); + + if (kbState !== KnowledgeBaseState.READY) { logger.debug('Knowledge base model is not yet ready. Retrying...'); throw new Error('Knowledge base model is not yet ready'); } @@ -169,3 +246,25 @@ export async function waitForKbModel({ { retries: 30, factor: 2, maxTimeout: 30_000 } ); } + +export async function warmupModel({ + esClient, + logger, + inferenceId, +}: { + esClient: { asInternalUser: ElasticsearchClient }; + logger: Logger; + inferenceId: string; +}) { + logger.debug(`Running inference to trigger model deployment for "${inferenceId}"`); + await pRetry( + () => + esClient.asInternalUser.inference.inference({ + inference_id: inferenceId, + input: 'hello world', + }), + { retries: 10 } + ).catch((error) => { + logger.error(`Unable to run inference on endpoint "${inferenceId}": ${error.message}`); + }); +} diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/kb_component_template.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/kb_component_template.ts deleted file mode 100644 index 9307aa8443497..0000000000000 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/kb_component_template.ts +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import { ClusterComponentTemplate } from '@elastic/elasticsearch/lib/api/types'; -import { AI_ASSISTANT_KB_INFERENCE_ID } from './inference_endpoint'; - -const keyword = { - type: 'keyword' as const, - ignore_above: 1024, -}; - -const text = { - type: 'text' as const, -}; - -const date = { - type: 'date' as const, -}; - -const dynamic = { - type: 'object' as const, - dynamic: true, -}; - -export const kbComponentTemplate: ClusterComponentTemplate['component_template']['template'] = { - mappings: { - dynamic: false, - properties: { - '@timestamp': date, - id: keyword, - doc_id: { type: 'text', fielddata: true }, // deprecated but kept for backwards compatibility - title: { - type: 'text', - fields: { - keyword: { - type: 'keyword', - ignore_above: 256, - }, - }, - }, - user: { - properties: { - id: keyword, - name: keyword, - }, - }, - type: keyword, - labels: dynamic, - conversation: { - properties: { - id: keyword, - title: text, - last_updated: date, - }, - }, - namespace: keyword, - text, - semantic_text: { - type: 'semantic_text', - inference_id: AI_ASSISTANT_KB_INFERENCE_ID, - }, - 'ml.tokens': { - type: 'rank_features', - }, - confidence: keyword, - is_correction: { - type: 'boolean', - }, - public: { - type: 'boolean', - }, - }, - }, -}; diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/create_knowledge_base_index.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/create_knowledge_base_index.ts new file mode 100644 index 0000000000000..796389bf20dfa --- /dev/null +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/create_knowledge_base_index.ts @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { ElasticsearchClient } from '@kbn/core-elasticsearch-server'; +import { errors } from '@elastic/elasticsearch'; +import { Logger } from '@kbn/logging'; + +export async function createKnowledgeBaseIndex({ + esClient, + logger, + inferenceId, + indexName, +}: { + esClient: { asInternalUser: ElasticsearchClient }; + logger: Logger; + inferenceId: string; + indexName: string; +}) { + logger.debug(`Creating knowledge base write index "${indexName}"`); + + try { + await esClient.asInternalUser.indices.create({ + index: indexName, + mappings: { + properties: { + semantic_text: { + type: 'semantic_text', + inference_id: inferenceId, + }, + }, + }, + }); + } catch (error) { + if ( + error instanceof errors.ResponseError && + error?.body?.error?.type === 'resource_already_exists_exception' + ) { + throw new Error( + `Write index "${indexName}" already exists. Please delete it before creating a new index.` + ); + } + throw error; + } +} diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/get_inference_id_from_write_index.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/get_inference_id_from_write_index.ts new file mode 100644 index 0000000000000..fd21893aab5e4 --- /dev/null +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/get_inference_id_from_write_index.ts @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { ElasticsearchClient } from '@kbn/core-elasticsearch-server'; +import { MappingSemanticTextProperty } from '@elastic/elasticsearch/lib/api/types'; +import { first } from 'lodash'; +import { resourceNames } from '..'; + +export async function getConcreteWriteIndex(esClient: { asInternalUser: ElasticsearchClient }) { + try { + const res = await esClient.asInternalUser.indices.getAlias({ + name: resourceNames.writeIndexAlias.kb, + }); + + return first(Object.keys(res)); + } catch (error) { + return; + } +} + +export async function getInferenceIdFromWriteIndex(esClient: { + asInternalUser: ElasticsearchClient; +}): Promise { + const response = await esClient.asInternalUser.indices.getMapping({ + index: resourceNames.writeIndexAlias.kb, + }); + + const [indexName, indexMappings] = Object.entries(response)[0]; + + const inferenceId = ( + indexMappings.mappings?.properties?.semantic_text as MappingSemanticTextProperty + )?.inference_id; + + if (!inferenceId) { + throw new Error(`inference_id not found in field mappings for index ${indexName}`); + } + + return inferenceId; +} diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/has_kb_index.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/has_kb_index.ts new file mode 100644 index 0000000000000..c8d586d8fe256 --- /dev/null +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/has_kb_index.ts @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { ElasticsearchClient } from '@kbn/core-elasticsearch-server'; + +import { resourceNames } from '..'; + +export async function hasKbWriteIndex({ + esClient, +}: { + esClient: { asInternalUser: ElasticsearchClient }; +}) { + return esClient.asInternalUser.indices.exists({ + index: resourceNames.writeIndexAlias.kb, + }); +} diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/index.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/index.ts index d666ee7aec832..112a6641a2a1d 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/index.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/index.ts @@ -22,19 +22,18 @@ import { getAccessQuery, getUserAccessFilters } from '../util/get_access_query'; import { getCategoryQuery } from '../util/get_category_query'; import { getSpaceQuery } from '../util/get_space_query'; import { - createInferenceEndpoint, - deleteInferenceEndpoint, + getInferenceEndpointsForEmbedding, getKbModelStatus, isInferenceEndpointMissingOrUnavailable, } from '../inference_endpoint'; import { recallFromSearchConnectors } from './recall_from_search_connectors'; import { ObservabilityAIAssistantPluginStartDependencies } from '../../types'; import { ObservabilityAIAssistantConfig } from '../../config'; -import { - isKnowledgeBaseIndexWriteBlocked, - isSemanticTextUnsupportedError, - reIndexKnowledgeBaseWithLock, -} from './reindex_knowledge_base'; +import { hasKbWriteIndex } from './has_kb_index'; +import { getInferenceIdFromWriteIndex } from './get_inference_id_from_write_index'; +import { reIndexKnowledgeBaseWithLock } from './reindex_knowledge_base'; +import { isSemanticTextUnsupportedError } from '../startup_migrations/run_startup_migrations'; +import { isKnowledgeBaseIndexWriteBlocked } from './index_write_block_utils'; 
interface Dependencies { core: CoreSetup; @@ -54,35 +53,13 @@ export interface RecalledEntry { labels?: Record; } -function throwKnowledgeBaseNotReady(body: any) { - throw serverUnavailable(`Knowledge base is not ready yet`, body); +function throwKnowledgeBaseNotReady(error: Error) { + throw serverUnavailable(`Knowledge base is not ready yet: ${error.message}`); } export class KnowledgeBaseService { constructor(private readonly dependencies: Dependencies) {} - async setup( - esClient: { - asCurrentUser: ElasticsearchClient; - asInternalUser: ElasticsearchClient; - }, - modelId: string - ) { - await deleteInferenceEndpoint({ esClient }).catch((e) => {}); // ensure existing inference endpoint is deleted - return createInferenceEndpoint({ esClient, logger: this.dependencies.logger, modelId }); - } - - async reset(esClient: { asCurrentUser: ElasticsearchClient }) { - try { - await deleteInferenceEndpoint({ esClient }); - } catch (error) { - if (isInferenceEndpointMissingOrUnavailable(error)) { - return; - } - throw error; - } - } - private async recallFromKnowledgeBase({ queries, categories, @@ -97,7 +74,7 @@ export class KnowledgeBaseService { const response = await this.dependencies.esClient.asInternalUser.search< Pick & { doc_id?: string } >({ - index: [resourceNames.aliases.kb], + index: [resourceNames.writeIndexAlias.kb], query: { bool: { should: queries.map(({ text, boost = 1 }) => ({ @@ -168,7 +145,7 @@ export class KnowledgeBaseService { namespace, }).catch((error) => { if (isInferenceEndpointMissingOrUnavailable(error)) { - throwKnowledgeBaseNotReady(error.body); + throwKnowledgeBaseNotReady(error); } throw error; }), @@ -227,9 +204,16 @@ export class KnowledgeBaseService { if (!this.dependencies.config.enableKnowledgeBase) { return []; } + + const doesKbIndexExist = await hasKbWriteIndex({ esClient: this.dependencies.esClient }); + + if (!doesKbIndexExist) { + return []; + } + try { const response = await this.dependencies.esClient.asInternalUser.search({ - 
index: resourceNames.aliases.kb, + index: resourceNames.writeIndexAlias.kb, query: { bool: { filter: [ @@ -277,7 +261,7 @@ export class KnowledgeBaseService { const response = await this.dependencies.esClient.asInternalUser.search< KnowledgeBaseEntry & { doc_id?: string } >({ - index: resourceNames.aliases.kb, + index: resourceNames.writeIndexAlias.kb, query: { bool: { filter: [ @@ -298,10 +282,7 @@ export class KnowledgeBaseService { }, sort: sortBy === 'title' - ? [ - { ['title.keyword']: { order: sortDirection } }, - { doc_id: { order: sortDirection } }, // sort by doc_id for backwards compatibility - ] + ? [{ ['title.keyword']: { order: sortDirection } }] : [{ [String(sortBy)]: { order: sortDirection } }], size: 500, _source: { @@ -332,12 +313,28 @@ export class KnowledgeBaseService { }; } catch (error) { if (isInferenceEndpointMissingOrUnavailable(error)) { - throwKnowledgeBaseNotReady(error.body); + throwKnowledgeBaseNotReady(error); } throw error; } }; + hasEntries = async () => { + const response = await this.dependencies.esClient.asInternalUser.search({ + index: resourceNames.writeIndexAlias.kb, + size: 0, + track_total_hits: 1, + terminate_after: 1, + }); + + const hitCount = + typeof response.hits.total === 'number' + ? response.hits.total + : response.hits.total?.value ?? 
0; + + return hitCount > 0; + }; + getPersonalUserInstructionId = async ({ isPublic, user, @@ -351,7 +348,7 @@ export class KnowledgeBaseService { return null; } const res = await this.dependencies.esClient.asInternalUser.search({ - index: resourceNames.aliases.kb, + index: resourceNames.writeIndexAlias.kb, query: { bool: { filter: [ @@ -399,7 +396,7 @@ export class KnowledgeBaseService { const response = await this.dependencies.esClient.asInternalUser.search({ size: 1, - index: resourceNames.aliases.kb, + index: resourceNames.writeIndexAlias.kb, query, _source: false, }); @@ -424,7 +421,7 @@ export class KnowledgeBaseService { await this.dependencies.esClient.asInternalUser.index< Omit & { namespace: string } >({ - index: resourceNames.aliases.kb, + index: resourceNames.writeIndexAlias.kb, id, document: { '@timestamp': new Date().toISOString(), @@ -440,24 +437,27 @@ export class KnowledgeBaseService { } catch (error) { this.dependencies.logger.debug(`Failed to add entry to knowledge base ${error}`); if (isInferenceEndpointMissingOrUnavailable(error)) { - throwKnowledgeBaseNotReady(error.body); + throwKnowledgeBaseNotReady(error); } if (isSemanticTextUnsupportedError(error)) { + const inferenceId = await getInferenceIdFromWriteIndex(this.dependencies.esClient); + reIndexKnowledgeBaseWithLock({ core: this.dependencies.core, logger: this.dependencies.logger, esClient: this.dependencies.esClient, + inferenceId, }).catch((e) => { if (error instanceof LockAcquisitionError) { - this.dependencies.logger.debug(`Re-indexing operation is already in progress`); + this.dependencies.logger.info(`Re-indexing operation is already in progress`); return; } this.dependencies.logger.error(`Failed to re-index knowledge base: ${e.message}`); }); throw serverUnavailable( - `The index "${resourceNames.aliases.kb}" does not support semantic text and must be reindexed. This re-index operation has been scheduled and will be started automatically. 
Please try again later.` + `The index "${resourceNames.writeIndexAlias.kb}" does not support semantic text and must be reindexed. This re-index operation has been scheduled and will be started automatically. Please try again later.` ); } @@ -474,7 +474,7 @@ export class KnowledgeBaseService { deleteEntry = async ({ id }: { id: string }): Promise => { try { await this.dependencies.esClient.asInternalUser.delete({ - index: resourceNames.aliases.kb, + index: resourceNames.writeIndexAlias.kb, id, refresh: 'wait_for', }); @@ -482,17 +482,27 @@ export class KnowledgeBaseService { return Promise.resolve(); } catch (error) { if (isInferenceEndpointMissingOrUnavailable(error)) { - throwKnowledgeBaseNotReady(error.body); + throwKnowledgeBaseNotReady(error); } throw error; } }; - getStatus = async () => { + getModelStatus = async () => { return getKbModelStatus({ + core: this.dependencies.core, esClient: this.dependencies.esClient, logger: this.dependencies.logger, config: this.dependencies.config, }); }; + + getInferenceEndpointsForEmbedding = async () => { + const { inferenceEndpoints } = await getInferenceEndpointsForEmbedding({ + esClient: this.dependencies.esClient, + logger: this.dependencies.logger, + }); + + return inferenceEndpoints; + }; } diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/index_write_block_utils.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/index_write_block_utils.ts new file mode 100644 index 0000000000000..053240191e34d --- /dev/null +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/index_write_block_utils.ts @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { errors } from '@elastic/elasticsearch'; +import { ElasticsearchClient, Logger } from '@kbn/core/server'; +import pRetry from 'p-retry'; +import { resourceNames } from '..'; + +export async function addIndexWriteBlock({ + esClient, + index, +}: { + esClient: { asInternalUser: ElasticsearchClient }; + index: string; +}) { + await esClient.asInternalUser.indices.addBlock({ index, block: 'write' }); +} + +export function removeIndexWriteBlock({ + esClient, + index, +}: { + esClient: { asInternalUser: ElasticsearchClient }; + index: string; +}) { + return esClient.asInternalUser.indices.putSettings({ + index, + body: { 'index.blocks.write': false }, + }); +} + +export async function hasIndexWriteBlock({ + esClient, + index, +}: { + esClient: { asInternalUser: ElasticsearchClient }; + index: string; +}) { + const response = await esClient.asInternalUser.indices.getSettings({ index }); + const writeBlockSetting = Object.values(response)[0]?.settings?.index?.blocks?.write; + return writeBlockSetting === 'true' || writeBlockSetting === true; +} + +export async function waitForWriteBlockToBeRemoved({ + esClient, + logger, + index, +}: { + esClient: { asInternalUser: ElasticsearchClient }; + logger: Logger; + index: string; +}) { + return pRetry( + async () => { + const isBlocked = await hasIndexWriteBlock({ esClient, index }); + if (isBlocked) { + logger.debug(`Waiting for the write block to be removed from "${index}"...`); + throw new Error( + 'Waiting for the re-index operation to complete and the write block to be removed...' 
+ ); + } + }, + { forever: true, maxTimeout: 10000 } + ); +} + +export function isKnowledgeBaseIndexWriteBlocked(error: any) { + return ( + error instanceof errors.ResponseError && + error.message.includes(`cluster_block_exception`) && + error.message.includes(resourceNames.writeIndexAlias.kb) + ); +} diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/reindex_knowledge_base.test.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/reindex_knowledge_base.test.ts new file mode 100644 index 0000000000000..3c2c630dbace6 --- /dev/null +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/reindex_knowledge_base.test.ts @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { getNextWriteIndexName } from './reindex_knowledge_base'; + +describe('getNextWriteIndexName', () => { + it('should return the next write index name', async () => { + expect(getNextWriteIndexName('.kibana-observability-ai-assistant-kb-000008')).toBe( + '.kibana-observability-ai-assistant-kb-000009' + ); + }); + + it('should return empty when input is empty', async () => { + expect(getNextWriteIndexName(undefined)).toBe(undefined); + }); + + it('should return empty when the sequence number is missing', async () => { + expect(getNextWriteIndexName('.kibana-observability-ai-assistant-kb')).toBe(undefined); + }); + + it('should return empty when the sequence number is not a number', async () => { + expect(getNextWriteIndexName('.kibana-observability-ai-assistant-kb-foobar')).toBe(undefined); + }); +}); diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/reindex_knowledge_base.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/reindex_knowledge_base.ts index 776f123e70f34..4f11d8cc0ad5f 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/reindex_knowledge_base.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/reindex_knowledge_base.ts @@ -5,109 +5,239 @@ * 2.0. 
*/ -import { errors as EsErrors } from '@elastic/elasticsearch'; import { ElasticsearchClient } from '@kbn/core-elasticsearch-server'; import { Logger } from '@kbn/logging'; +import { last } from 'lodash'; +import pRetry from 'p-retry'; import { CoreSetup } from '@kbn/core/server'; import { LockManagerService } from '@kbn/lock-manager'; import { resourceNames } from '..'; -import { createKbConcreteIndex } from '../startup_migrations/create_or_update_index_assets'; import { ObservabilityAIAssistantPluginStartDependencies } from '../../types'; +import { + addIndexWriteBlock, + hasIndexWriteBlock, + removeIndexWriteBlock, +} from './index_write_block_utils'; +import { createKnowledgeBaseIndex } from './create_knowledge_base_index'; +import { updateKnowledgeBaseWriteIndexAlias } from './update_knowledge_base_index_alias'; export const KB_REINDEXING_LOCK_ID = 'observability_ai_assistant:kb_reindexing'; export async function reIndexKnowledgeBaseWithLock({ core, logger, esClient, + inferenceId, }: { core: CoreSetup; logger: Logger; esClient: { asInternalUser: ElasticsearchClient; }; + inferenceId: string; }): Promise { const lmService = new LockManagerService(core, logger); return lmService.withLock(KB_REINDEXING_LOCK_ID, () => - reIndexKnowledgeBase({ logger, esClient }) + reIndexKnowledgeBaseWithWriteIndexBlock({ + logger: logger.get('kb-reindex'), + esClient, + inferenceId, + }) ); } -async function reIndexKnowledgeBase({ +async function reIndexKnowledgeBaseWithWriteIndexBlock({ logger, esClient, + inferenceId, }: { logger: Logger; - esClient: { - asInternalUser: ElasticsearchClient; - }; + esClient: { asInternalUser: ElasticsearchClient }; + inferenceId: string; }): Promise { - logger.debug('Initiating knowledge base re-indexing...'); + logger.debug('Initializing re-indexing of knowledge base...'); + if (await hasIndexWriteBlock({ esClient, index: resourceNames.writeIndexAlias.kb })) { + throw new Error( + `Write block is already set on the knowledge base index: 
${resourceNames.writeIndexAlias.kb}` + ); + } try { - const originalIndex = resourceNames.concreteIndexName.kb; - const tempIndex = `${resourceNames.aliases.kb}-000002`; - - // Create temporary index - logger.debug(`Creating temporary index "${tempIndex}"...`); - await esClient.asInternalUser.indices.delete({ index: tempIndex }, { ignore: [404] }); - await esClient.asInternalUser.indices.create({ index: tempIndex }); - - // Perform reindex to temporary index - logger.debug(`Re-indexing knowledge base to temporary index "${tempIndex}"...`); - await esClient.asInternalUser.reindex({ - body: { - source: { index: originalIndex }, - dest: { index: tempIndex }, - }, - refresh: true, - wait_for_completion: true, - }); - - // Delete and re-create original index - logger.debug(`Deleting original index "${originalIndex}" and re-creating it...`); - await esClient.asInternalUser.indices.delete({ index: originalIndex }); - await createKbConcreteIndex({ logger, esClient }); - - // Perform reindex back to original index - logger.debug(`Re-indexing knowledge base back to original index "${originalIndex}"...`); - await esClient.asInternalUser.reindex({ - body: { - source: { index: tempIndex }, - dest: { index: originalIndex }, - }, - refresh: true, - wait_for_completion: true, - }); - - // Delete temporary index - logger.debug(`Deleting temporary index "${tempIndex}"...`); - await esClient.asInternalUser.indices.delete({ index: tempIndex }); - - logger.info('Re-indexing knowledge base completed successfully'); - return true; + await addIndexWriteBlock({ esClient, index: resourceNames.writeIndexAlias.kb }); + await reIndexKnowledgeBase({ logger, esClient, inferenceId }); + logger.info('Re-indexing knowledge base completed successfully.'); } catch (error) { - throw new Error(`Failed to re-index knowledge base: ${error.message}`); + logger.error(`Re-indexing knowledge base failed: ${error.message}`); + throw error; + } finally { + await removeIndexWriteBlock({ esClient, index: 
resourceNames.writeIndexAlias.kb }); } + + return true; } -export function isKnowledgeBaseIndexWriteBlocked(error: any) { - return ( - error instanceof EsErrors.ResponseError && - error.message.includes( - `cluster_block_exception: index [${resourceNames.concreteIndexName.kb}] blocked` - ) +async function reIndexKnowledgeBase({ + logger, + esClient, + inferenceId, +}: { + logger: Logger; + esClient: { asInternalUser: ElasticsearchClient }; + inferenceId: string; +}): Promise { + const activeReindexingTask = await getActiveReindexingTaskId(esClient); + if (activeReindexingTask) { + throw new Error( + `Re-indexing task "${activeReindexingTask}" is already in progress for the knowledge base index: ${resourceNames.writeIndexAlias.kb}` + ); + } + + const { currentWriteIndexName, nextWriteIndexName } = await getCurrentAndNextWriteIndexNames({ + esClient, + logger, + }); + + await createKnowledgeBaseIndex({ esClient, logger, inferenceId, indexName: nextWriteIndexName }); + + logger.info( + `Re-indexing knowledge base from "${currentWriteIndexName}" to index "${nextWriteIndexName}"...` ); + + const reindexResponse = await esClient.asInternalUser.reindex({ + source: { index: currentWriteIndexName }, + dest: { index: nextWriteIndexName }, + refresh: true, + wait_for_completion: false, + }); + + // Point write index alias to the new index + await updateKnowledgeBaseWriteIndexAlias({ + esClient, + logger, + nextWriteIndexName, + currentWriteIndexName, + }); + + const taskId = reindexResponse.task?.toString(); + if (taskId) { + await waitForReIndexTaskToComplete({ esClient, taskId, logger }); + } else { + throw new Error(`ID for re-indexing task was not found`); + } + + // Delete original index + logger.debug(`Deleting write index "${currentWriteIndexName}"`); + await esClient.asInternalUser.indices.delete({ index: currentWriteIndexName }); } -export function isSemanticTextUnsupportedError(error: Error) { - const semanticTextUnsupportedError = - 'The [sparse_vector] field type 
is not supported on indices created on versions 8.0 to 8.10'; +async function getCurrentWriteIndexName(esClient: { asInternalUser: ElasticsearchClient }) { + const response = await esClient.asInternalUser.indices.getAlias( + { name: resourceNames.writeIndexAlias.kb }, + { ignore: [404] } + ); + + const currentWriteIndexName = Object.entries(response).find( + ([, aliasInfo]) => aliasInfo.aliases[resourceNames.writeIndexAlias.kb]?.is_write_index + )?.[0]; + + return currentWriteIndexName; +} + +export function getNextWriteIndexName(currentWriteIndexName: string | undefined) { + if (!currentWriteIndexName) { + return; + } + + const latestIndexNumber = last(currentWriteIndexName.split('-')); + if (!latestIndexNumber) { + return; + } + + // sequence number must be a six digit zero padded number like 000008 or 002201 + const isSequenceNumberValid = /^\d{6}$/.test(latestIndexNumber); + if (!isSequenceNumberValid) { + return; + } + + const nextIndexSequenceNumber = (parseInt(latestIndexNumber, 10) + 1).toString().padStart(6, '0'); + return `${resourceNames.writeIndexAlias.kb}-${nextIndexSequenceNumber}`; +} + +async function getCurrentAndNextWriteIndexNames({ + esClient, + logger, +}: { + esClient: { asInternalUser: ElasticsearchClient }; + logger: Logger; +}) { + const currentWriteIndexName = await getCurrentWriteIndexName(esClient); + const nextWriteIndexName = getNextWriteIndexName(currentWriteIndexName); + if (!currentWriteIndexName || !nextWriteIndexName) { + throw new Error( + `"${currentWriteIndexName}" is not a valid write index name. Skipping re-indexing of knowledge base.` + ); + } + + return { currentWriteIndexName, nextWriteIndexName }; +} + +export async function getActiveReindexingTaskId(esClient: { asInternalUser: ElasticsearchClient }) { + const response = await esClient.asInternalUser.tasks.list({ + detailed: true, + actions: ['indices:data/write/reindex'], + }); + + for (const node of Object.values(response.nodes ?? 
{})) { + for (const [taskId, task] of Object.entries(node.tasks)) { + if (task.description?.includes(resourceNames.writeIndexAlias.kb)) { + return taskId; + } + } + } +} + +async function waitForReIndexTaskToComplete({ + esClient, + taskId, + logger, +}: { + esClient: { asInternalUser: ElasticsearchClient }; + taskId: string; + logger: Logger; +}): Promise { + return pRetry( + async () => { + const taskResponse = await esClient.asInternalUser.tasks.get({ + task_id: taskId, + wait_for_completion: false, + }); + + if (!taskResponse.completed) { + logger.debug(`Waiting for re-indexing task "${taskId}" to complete...`); + throw new Error(`Waiting for re-indexing task "${taskId}" to complete...`); + } + }, + { forever: true, maxTimeout: 10000 } + ); +} + +export async function isReIndexInProgress({ + esClient, + logger, + core, +}: { + esClient: { asInternalUser: ElasticsearchClient }; + logger: Logger; + core: CoreSetup; +}) { + const lmService = new LockManagerService(core, logger); + + const [lock, activeReindexingTask] = await Promise.all([ + lmService.getLock(KB_REINDEXING_LOCK_ID), + getActiveReindexingTaskId(esClient), + ]); - const isSemanticTextUnspported = - error instanceof EsErrors.ResponseError && - (error.message.includes(semanticTextUnsupportedError) || - // @ts-expect-error - error.meta?.body?.error?.caused_by?.reason.includes(semanticTextUnsupportedError)); + logger.debug(`Lock: ${!!lock}`); + logger.debug(`ES re-indexing task: ${!!activeReindexingTask}`); - return isSemanticTextUnspported; + return lock !== undefined || activeReindexingTask !== undefined; } diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/update_knowledge_base_index_alias.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/update_knowledge_base_index_alias.ts new file mode 100644 index 0000000000000..cc9c665430932 --- /dev/null +++ 
b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/knowledge_base_service/update_knowledge_base_index_alias.ts @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { ElasticsearchClient } from '@kbn/core-elasticsearch-server'; +import { Logger } from '@kbn/logging'; +import { resourceNames } from '..'; + +export async function updateKnowledgeBaseWriteIndexAlias({ + esClient, + logger, + nextWriteIndexName, + currentWriteIndexName, +}: { + esClient: { asInternalUser: ElasticsearchClient }; + logger: Logger; + nextWriteIndexName: string; + currentWriteIndexName: string; +}) { + logger.debug( + `Updating write index alias from "${currentWriteIndexName}" to "${nextWriteIndexName}"` + ); + const alias = resourceNames.writeIndexAlias.kb; + await esClient.asInternalUser.indices.updateAliases({ + actions: [ + { remove: { index: currentWriteIndexName, alias } }, + { add: { index: nextWriteIndexName, alias, is_write_index: true } }, + ], + }); +} diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/startup_migrations/create_or_update_index_assets.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/startup_migrations/create_or_update_index_assets.ts deleted file mode 100644 index f416dd1d0292a..0000000000000 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/startup_migrations/create_or_update_index_assets.ts +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import { createConcreteWriteIndex, getDataStreamAdapter } from '@kbn/alerting-plugin/server'; -import type { CoreSetup, ElasticsearchClient, Logger } from '@kbn/core/server'; -import type { ObservabilityAIAssistantPluginStartDependencies } from '../../types'; -import { conversationComponentTemplate } from '../conversation_component_template'; -import { kbComponentTemplate } from '../kb_component_template'; -import { resourceNames } from '..'; - -export async function updateExistingIndexAssets({ - logger, - core, -}: { - logger: Logger; - core: CoreSetup; -}) { - const [coreStart] = await core.getStartServices(); - const { asInternalUser } = coreStart.elasticsearch.client; - - const hasKbIndex = await asInternalUser.indices.exists({ - index: resourceNames.aliases.kb, - }); - - const hasConversationIndex = await asInternalUser.indices.exists({ - index: resourceNames.aliases.conversations, - }); - - if (!hasKbIndex && !hasConversationIndex) { - logger.debug('Index assets do not exist. 
Aborting updating index assets'); - return; - } - - await createOrUpdateIndexAssets({ logger, core }); -} - -export async function createOrUpdateIndexAssets({ - logger, - core, -}: { - logger: Logger; - core: CoreSetup; -}) { - try { - logger.debug('Setting up index assets'); - const [coreStart] = await core.getStartServices(); - const { asInternalUser } = coreStart.elasticsearch.client; - - // Conversations: component template - await asInternalUser.cluster.putComponentTemplate({ - create: false, - name: resourceNames.componentTemplate.conversations, - template: conversationComponentTemplate, - }); - - // Conversations: index template - await asInternalUser.indices.putIndexTemplate({ - name: resourceNames.indexTemplate.conversations, - composed_of: [resourceNames.componentTemplate.conversations], - create: false, - index_patterns: [resourceNames.indexPatterns.conversations], - template: { - settings: { - number_of_shards: 1, - auto_expand_replicas: '0-1', - hidden: true, - }, - }, - }); - - // Conversations: write index - const conversationAliasName = resourceNames.aliases.conversations; - await createConcreteWriteIndex({ - esClient: asInternalUser, - logger, - totalFieldsLimit: 10000, - indexPatterns: { - alias: conversationAliasName, - pattern: `${conversationAliasName}*`, - basePattern: `${conversationAliasName}*`, - name: resourceNames.concreteIndexName.conversations, - template: resourceNames.indexTemplate.conversations, - }, - dataStreamAdapter: getDataStreamAdapter({ useDataStreamForAlerts: false }), - }); - - // Knowledge base: component template - await asInternalUser.cluster.putComponentTemplate({ - create: false, - name: resourceNames.componentTemplate.kb, - template: kbComponentTemplate, - }); - - // Knowledge base: index template - await asInternalUser.indices.putIndexTemplate({ - name: resourceNames.indexTemplate.kb, - composed_of: [resourceNames.componentTemplate.kb], - create: false, - index_patterns: [resourceNames.indexPatterns.kb], - template: { 
- settings: { - number_of_shards: 1, - auto_expand_replicas: '0-1', - hidden: true, - 'index.mapping.semantic_text.use_legacy_format': false, - }, - }, - }); - - // Knowledge base: write index - await createKbConcreteIndex({ logger, esClient: coreStart.elasticsearch.client }); - - logger.info('Successfully set up index assets'); - } catch (error) { - logger.error(`Failed setting up index assets: ${error.message}`); - logger.debug(error); - } -} - -export async function createKbConcreteIndex({ - logger, - esClient, -}: { - logger: Logger; - esClient: { - asInternalUser: ElasticsearchClient; - }; -}) { - const kbAliasName = resourceNames.aliases.kb; - return createConcreteWriteIndex({ - esClient: esClient.asInternalUser, - logger, - totalFieldsLimit: 10000, - indexPatterns: { - alias: kbAliasName, - pattern: `${kbAliasName}*`, - basePattern: `${kbAliasName}*`, - name: resourceNames.concreteIndexName.kb, - template: resourceNames.indexTemplate.kb, - }, - dataStreamAdapter: getDataStreamAdapter({ useDataStreamForAlerts: false }), - }); -} diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/startup_migrations/populate_missing_semantic_text_field_migration.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/startup_migrations/populate_missing_semantic_text_field_migration.ts deleted file mode 100644 index 70bfbbe6a9c6a..0000000000000 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/startup_migrations/populate_missing_semantic_text_field_migration.ts +++ /dev/null @@ -1,183 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import { ElasticsearchClient } from '@kbn/core-elasticsearch-server'; -import pLimit from 'p-limit'; -import type { CoreSetup, Logger } from '@kbn/core/server'; -import { uniq } from 'lodash'; -import pRetry from 'p-retry'; -import { LockAcquisitionError, LockManagerService } from '@kbn/lock-manager'; -import { KnowledgeBaseEntry } from '../../../common'; -import { resourceNames } from '..'; -import { waitForKbModel } from '../inference_endpoint'; -import { ObservabilityAIAssistantPluginStartDependencies } from '../../types'; -import { ObservabilityAIAssistantConfig } from '../../config'; -import { reIndexKnowledgeBaseWithLock } from '../knowledge_base_service/reindex_knowledge_base'; - -const PLUGIN_STARTUP_LOCK_ID = 'observability_ai_assistant:startup_migrations'; - -// This function populates the `semantic_text` field for knowledge base entries during the plugin's startup process. -// It ensures all missing fields are updated in batches and uses a distributed lock to prevent conflicts in distributed environments. -// If the knowledge base index does not support the `semantic_text` field, it is re-indexed. -export async function populateMissingSemanticTextFieldMigration({ - core, - logger, - config, -}: { - core: CoreSetup; - logger: Logger; - config: ObservabilityAIAssistantConfig; -}) { - const [coreStart] = await core.getStartServices(); - const esClient = coreStart.elasticsearch.client; - - const lmService = new LockManagerService(core, logger); - await lmService - .withLock(PLUGIN_STARTUP_LOCK_ID, async () => { - const hasKbIndex = await esClient.asInternalUser.indices.exists({ - index: resourceNames.aliases.kb, - }); - - if (!hasKbIndex) { - logger.debug('Knowledge base index does not exist. 
Aborting updating index assets'); - return; - } - - const isKbSemanticTextCompatible = await isKnowledgeBaseSemanticTextCompatible({ - logger, - esClient, - }); - - if (!isKbSemanticTextCompatible) { - await reIndexKnowledgeBaseWithLock({ core, logger, esClient }); - } - - await pRetry( - async () => populateMissingSemanticTextFieldRecursively({ esClient, logger, config }), - { retries: 5, minTimeout: 10_000 } - ); - }) - .catch((error) => { - if (!(error instanceof LockAcquisitionError)) { - throw error; - } - }); -} - -// Ensures that every doc has populated the `semantic_text` field. -// It retrieves entries without the field, updates them in batches, and continues until no entries remain. -async function populateMissingSemanticTextFieldRecursively({ - esClient, - logger, - config, -}: { - esClient: { asInternalUser: ElasticsearchClient }; - logger: Logger; - config: ObservabilityAIAssistantConfig; -}) { - logger.debug( - 'Checking for remaining entries without semantic_text field that need to be migrated' - ); - - const response = await esClient.asInternalUser.search({ - size: 100, - track_total_hits: true, - index: [resourceNames.aliases.kb], - query: { - bool: { - must_not: { - exists: { - field: 'semantic_text', - }, - }, - }, - }, - _source: { - excludes: ['ml.tokens'], - }, - }); - - if (response.hits.hits.length === 0) { - logger.debug('No remaining entries to migrate'); - return; - } - - await waitForKbModel({ esClient, logger, config }); - - const indicesWithOutdatedEntries = uniq(response.hits.hits.map((hit) => hit._index)); - logger.debug( - `Found ${response.hits.hits.length} entries without semantic_text field in "${indicesWithOutdatedEntries}". 
Updating now...` - ); - - // Limit the number of concurrent requests to avoid overloading the cluster - const limiter = pLimit(20); - const promises = response.hits.hits.map((hit) => { - return limiter(() => { - if (!hit._source || !hit._id) { - return; - } - - return esClient.asInternalUser.update({ - refresh: 'wait_for', - index: resourceNames.aliases.kb, - id: hit._id, - doc: { - ...hit._source, - semantic_text: hit._source.text ?? 'No text', - }, - }); - }); - }); - - await Promise.all(promises); - logger.debug(`Updated ${promises.length} entries`); - - await sleep(100); - await populateMissingSemanticTextFieldRecursively({ esClient, logger, config }); -} - -async function sleep(ms: number) { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -// Checks if the knowledge base index supports `semantic_text` -// If the index was created before version 8.11, it requires re-indexing to support the `semantic_text` field. -async function isKnowledgeBaseSemanticTextCompatible({ - logger, - esClient, -}: { - logger: Logger; - esClient: { asInternalUser: ElasticsearchClient }; -}): Promise { - const indexSettingsResponse = await esClient.asInternalUser.indices.getSettings({ - index: resourceNames.aliases.kb, - }); - - const results = Object.entries(indexSettingsResponse); - if (results.length === 0) { - logger.debug('No knowledge base indices found. Skipping re-indexing.'); - return true; - } - - const [indexName, { settings }] = results[0]; - const createdVersion = parseInt(settings?.index?.version?.created ?? '', 10); - - // Check if the index was created before version 8.11 - const versionThreshold = 8110000; // Version 8.11.0 - if (createdVersion >= versionThreshold) { - logger.debug( - `Knowledge base index "${indexName}" was created in version ${createdVersion}, and does not require re-indexing. Semantic text field is already supported. 
Aborting` - ); - return true; - } - - logger.info( - `Knowledge base index was created in ${createdVersion} and must be re-indexed in order to support semantic_text field. Re-indexing now...` - ); - - return false; -} diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/startup_migrations/populate_missing_semantic_text_fields.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/startup_migrations/populate_missing_semantic_text_fields.ts new file mode 100644 index 0000000000000..119fbf83e72ea --- /dev/null +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/startup_migrations/populate_missing_semantic_text_fields.ts @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { ElasticsearchClient } from '@kbn/core-elasticsearch-server'; +import pLimit from 'p-limit'; +import type { CoreSetup, Logger } from '@kbn/core/server'; +import { uniq } from 'lodash'; +import { LockManagerService } from '@kbn/lock-manager'; +import { KnowledgeBaseEntry } from '../../../common'; +import { resourceNames } from '..'; +import { waitForKbModel } from '../inference_endpoint'; +import { ObservabilityAIAssistantPluginStartDependencies } from '../../types'; +import { ObservabilityAIAssistantConfig } from '../../config'; +import { sleep } from '../util/sleep'; +import { getInferenceIdFromWriteIndex } from '../knowledge_base_service/get_inference_id_from_write_index'; + +const POPULATE_MISSING_SEMANTIC_TEXT_FIELDS_LOCK_ID = 'populate_missing_semantic_text_fields'; +export async function populateMissingSemanticTextFieldWithLock({ + core, + logger, + config, + esClient, +}: { + core: CoreSetup; + logger: Logger; + config: ObservabilityAIAssistantConfig; + esClient: { asInternalUser: 
ElasticsearchClient }; +}) { + const lmService = new LockManagerService(core, logger); + await lmService.withLock(POPULATE_MISSING_SEMANTIC_TEXT_FIELDS_LOCK_ID, async () => + populateMissingSemanticTextFieldRecursively({ core, esClient, logger, config }) + ); +} + +// Ensures that every doc has populated the `semantic_text` field. +// It retrieves entries without the field, updates them in batches, and continues until no entries remain. +async function populateMissingSemanticTextFieldRecursively({ + core, + esClient, + logger, + config, +}: { + core: CoreSetup; + esClient: { asInternalUser: ElasticsearchClient }; + logger: Logger; + config: ObservabilityAIAssistantConfig; +}) { + logger.debug( + 'Checking for remaining entries without semantic_text field that need to be migrated' + ); + + const response = await esClient.asInternalUser.search({ + size: 100, + track_total_hits: true, + index: [resourceNames.writeIndexAlias.kb], + query: { + bool: { + must_not: { + exists: { + field: 'semantic_text', + }, + }, + }, + }, + _source: { + excludes: ['ml.tokens'], + }, + }); + + if (response.hits.hits.length === 0) { + logger.debug('No remaining entries to migrate'); + return; + } + + const inferenceId = await getInferenceIdFromWriteIndex(esClient); + await waitForKbModel({ core, esClient, logger, config, inferenceId }); + + const indicesWithOutdatedEntries = uniq(response.hits.hits.map((hit) => hit._index)); + logger.debug( + `Found ${response.hits.hits.length} entries without semantic_text field in "${indicesWithOutdatedEntries}". Updating now...` + ); + + // Limit the number of concurrent requests to avoid overloading the cluster + const limiter = pLimit(20); + const promises = response.hits.hits.map((hit) => { + return limiter(() => { + if (!hit._source || !hit._id) { + return; + } + + return esClient.asInternalUser.update({ + refresh: 'wait_for', + index: resourceNames.writeIndexAlias.kb, + id: hit._id, + doc: { + ...hit._source, + semantic_text: hit._source.text ?? 
'No text', + }, + }); + }); + }); + + await Promise.all(promises); + logger.debug(`Updated ${promises.length} entries`); + + await sleep(100); + await populateMissingSemanticTextFieldRecursively({ core, esClient, logger, config }); +} diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/startup_migrations/run_startup_migrations.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/startup_migrations/run_startup_migrations.ts new file mode 100644 index 0000000000000..9fd8e79a8b736 --- /dev/null +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/startup_migrations/run_startup_migrations.ts @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { ElasticsearchClient } from '@kbn/core-elasticsearch-server'; +import type { CoreSetup, Logger } from '@kbn/core/server'; +import pRetry from 'p-retry'; +import { errors } from '@elastic/elasticsearch'; +import { LockAcquisitionError, LockManagerService } from '@kbn/lock-manager'; +import { resourceNames } from '..'; +import { ObservabilityAIAssistantPluginStartDependencies } from '../../types'; +import { ObservabilityAIAssistantConfig } from '../../config'; +import { reIndexKnowledgeBaseWithLock } from '../knowledge_base_service/reindex_knowledge_base'; +import { populateMissingSemanticTextFieldWithLock } from './populate_missing_semantic_text_fields'; +import { hasKbWriteIndex } from '../knowledge_base_service/has_kb_index'; +import { getInferenceIdFromWriteIndex } from '../knowledge_base_service/get_inference_id_from_write_index'; +import { updateExistingIndexAssets } from '../index_assets/update_existing_index_assets'; + +const PLUGIN_STARTUP_LOCK_ID = 'observability_ai_assistant:startup_migrations'; + +// This 
function performs necessary startup migrations for the observability AI assistant: +// 1. Updates index assets to ensure mappings are correct +// 2. If the knowledge base index does not support the `semantic_text` field, it is re-indexed. +// 3. Populates the `semantic_text` field for knowledge base entries +export async function runStartupMigrations({ + core, + logger, + config, +}: { + core: CoreSetup; + logger: Logger; + config: ObservabilityAIAssistantConfig; +}) { + // update index assets to ensure mappings are correct + await updateExistingIndexAssets({ logger, core }); + + const [coreStart] = await core.getStartServices(); + const esClient = coreStart.elasticsearch.client; + + const lmService = new LockManagerService(core, logger); + await lmService + .withLock(PLUGIN_STARTUP_LOCK_ID, async () => { + const doesKbIndexExist = await hasKbWriteIndex({ esClient }); + + if (!doesKbIndexExist) { + logger.info('Knowledge base index does not exist. Aborting updating index assets'); + return; + } + + const isKbSemanticTextCompatible = await isKnowledgeBaseSemanticTextCompatible({ + logger, + esClient, + }); + + if (!isKbSemanticTextCompatible) { + const inferenceId = await getInferenceIdFromWriteIndex(esClient); + await reIndexKnowledgeBaseWithLock({ core, logger, esClient, inferenceId }); + } + + await pRetry( + async () => populateMissingSemanticTextFieldWithLock({ core, logger, config, esClient }), + { + retries: 5, + minTimeout: 10_000, + onFailedAttempt: async (error) => { + const isLockAcquisitionError = error instanceof LockAcquisitionError; + if (!isLockAcquisitionError) { + throw error; + } + }, + } + ); + }) + .catch((error) => { + const isLockAcquisitionError = error instanceof LockAcquisitionError; + if (!isLockAcquisitionError) { + throw error; + } + }); +} + +// Checks if the knowledge base index supports `semantic_text` +// If the index was created before version 8.11, it requires re-indexing to support the `semantic_text` field. 
+async function isKnowledgeBaseSemanticTextCompatible({ + logger, + esClient, +}: { + logger: Logger; + esClient: { asInternalUser: ElasticsearchClient }; +}): Promise { + const indexSettingsResponse = await esClient.asInternalUser.indices.getSettings({ + index: resourceNames.writeIndexAlias.kb, + }); + + const results = Object.entries(indexSettingsResponse); + if (results.length === 0) { + logger.debug('No knowledge base indices found. Skipping re-indexing.'); + return true; + } + + const [indexName, { settings }] = results[0]; + const createdVersion = parseInt(settings?.index?.version?.created ?? '', 10); + + // Check if the index was created before version 8.11 + const versionThreshold = 8110000; // Version 8.11.0 + if (createdVersion >= versionThreshold) { + logger.debug( + `Knowledge base index "${indexName}" was created in version ${createdVersion}, and does not require re-indexing. Semantic text field is already supported. Aborting` + ); + return true; + } + + logger.info( + `Knowledge base index was created in ${createdVersion} and must be re-indexed in order to support semantic_text field. 
Re-indexing now...` + ); + + return false; +} + +export function isSemanticTextUnsupportedError(error: Error) { + const semanticTextUnsupportedError = + 'The [sparse_vector] field type is not supported on indices created on versions 8.0 to 8.10'; + + const isSemanticTextUnsupported = + error instanceof errors.ResponseError && + (error.message.includes(semanticTextUnsupportedError) || + // @ts-expect-error + error.meta?.body?.error?.caused_by?.reason.includes(semanticTextUnsupportedError)); + + return isSemanticTextUnsupported; +} diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/util/sleep.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/util/sleep.ts new file mode 100644 index 0000000000000..358a3a6fcd020 --- /dev/null +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/service/util/sleep.ts @@ -0,0 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +export async function sleep(ms: number) { + return new Promise((resolve) => setTimeout(resolve, ms)); +} diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/utils/recall/recall_and_score.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/utils/recall/recall_and_score.ts index dd42b2e7045ce..091e11c942e69 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/utils/recall/recall_and_score.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/utils/recall/recall_and_score.ts @@ -51,6 +51,7 @@ export async function recallAndScore({ ); if (!suggestions.length) { + logger.debug('No suggestions found during recall'); return { relevantDocuments: [], llmScores: [], @@ -58,6 +59,8 @@ export async function recallAndScore({ }; } + logger.debug(`Found ${suggestions.length} suggestions during recall`); + try { const { llmScores, relevantDocuments } = await scoreSuggestions({ suggestions, @@ -70,6 +73,10 @@ export async function recallAndScore({ chat, }); + logger.debug( + `Found ${relevantDocuments.length} relevant documents out of ${suggestions.length} suggestions` + ); + analytics.reportEvent(recallRankingEventType, { scoredDocuments: suggestions.map((suggestion) => { const llmScore = llmScores.find((score) => score.id === suggestion.id); diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/server/utils/recall/score_suggestions.ts b/x-pack/platform/plugins/shared/observability_ai_assistant/server/utils/recall/score_suggestions.ts index f7975d0325048..fba7bbce7ed97 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/server/utils/recall/score_suggestions.ts +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/server/utils/recall/score_suggestions.ts @@ -16,10 +16,12 @@ import { parseSuggestionScores } from './parse_suggestion_scores'; import { RecalledSuggestion } from './recall_and_score'; import { ShortIdTable } from 
'../../../common/utils/short_id_table'; +export const SCORE_FUNCTION_NAME = 'score'; + const scoreFunctionRequestRt = t.type({ message: t.type({ function_call: t.type({ - name: t.literal('score'), + name: t.literal(SCORE_FUNCTION_NAME), arguments: t.string, }), }), @@ -91,7 +93,7 @@ export async function scoreSuggestions({ }; const scoreFunction = { - name: 'score', + name: SCORE_FUNCTION_NAME, description: 'Use this function to score documents based on how relevant they are to the conversation.', parameters: { @@ -115,7 +117,7 @@ export async function scoreSuggestions({ chat('score_suggestions', { messages: [...messages.slice(0, -2), newUserMessage], functions: [scoreFunction], - functionCall: 'score', + functionCall: SCORE_FUNCTION_NAME, signal, stream: true, }).pipe(concatenateChatCompletionChunks()) diff --git a/x-pack/platform/plugins/shared/observability_ai_assistant/tsconfig.json b/x-pack/platform/plugins/shared/observability_ai_assistant/tsconfig.json index 5a0faa995ffa3..0ddeab520d03d 100644 --- a/x-pack/platform/plugins/shared/observability_ai_assistant/tsconfig.json +++ b/x-pack/platform/plugins/shared/observability_ai_assistant/tsconfig.json @@ -54,6 +54,7 @@ "@kbn/core-http-browser", "@kbn/sse-utils", "@kbn/core-security-server", + "@kbn/ml-trained-models-utils", "@kbn/lock-manager" ], "exclude": ["target/**/*"] diff --git a/x-pack/solutions/observability/plugins/observability_ai_assistant_management/public/hooks/use_get_knowledge_base_entries.ts b/x-pack/solutions/observability/plugins/observability_ai_assistant_management/public/hooks/use_get_knowledge_base_entries.ts index c1a119d6a0f49..0274331379f1d 100644 --- a/x-pack/solutions/observability/plugins/observability_ai_assistant_management/public/hooks/use_get_knowledge_base_entries.ts +++ b/x-pack/solutions/observability/plugins/observability_ai_assistant_management/public/hooks/use_get_knowledge_base_entries.ts @@ -6,6 +6,7 @@ */ import { useQuery } from '@tanstack/react-query'; +import { 
KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/public'; import { REACT_QUERY_KEYS } from '../constants'; import { useKibana } from './use_kibana'; @@ -13,10 +14,12 @@ export function useGetKnowledgeBaseEntries({ query, sortBy, sortDirection, + kbState, }: { query: string; sortBy: string; sortDirection: 'asc' | 'desc'; + kbState?: KnowledgeBaseState; }) { const { observabilityAIAssistant } = useKibana().services; @@ -40,6 +43,7 @@ export function useGetKnowledgeBaseEntries({ }, }); }, + enabled: kbState === KnowledgeBaseState.READY, keepPreviousData: true, refetchOnWindowFocus: false, }); diff --git a/x-pack/solutions/observability/plugins/observability_ai_assistant_management/public/routes/components/knowledge_base_tab.test.tsx b/x-pack/solutions/observability/plugins/observability_ai_assistant_management/public/routes/components/knowledge_base_tab.test.tsx index 5f198d4f4e93f..f282ddfe51962 100644 --- a/x-pack/solutions/observability/plugins/observability_ai_assistant_management/public/routes/components/knowledge_base_tab.test.tsx +++ b/x-pack/solutions/observability/plugins/observability_ai_assistant_management/public/routes/components/knowledge_base_tab.test.tsx @@ -7,6 +7,7 @@ import React from 'react'; import { fireEvent } from '@testing-library/react'; +import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/public'; import { useGenAIConnectors, useKnowledgeBase } from '@kbn/ai-assistant/src/hooks'; import { render } from '../../helpers/test_helper'; import { useCreateKnowledgeBaseEntry } from '../../hooks/use_create_knowledge_base_entry'; @@ -20,6 +21,12 @@ jest.mock('../../hooks/use_create_knowledge_base_entry'); jest.mock('../../hooks/use_import_knowledge_base_entries'); jest.mock('../../hooks/use_delete_knowledge_base_entry'); jest.mock('@kbn/ai-assistant/src/hooks'); +jest.mock('@kbn/ai-assistant/src/hooks/use_inference_endpoints', () => ({ + useInferenceEndpoints: () => ({ + inferenceEndpoints: [{ inference_id: 
'id1' }, { inference_id: 'id2' }], + isLoading: false, + }), +})); const useGetKnowledgeBaseEntriesMock = useGetKnowledgeBaseEntries as jest.Mock; const useCreateKnowledgeBaseEntryMock = useCreateKnowledgeBaseEntry as jest.Mock; @@ -57,12 +64,13 @@ describe('KnowledgeBaseTab', () => { useKnowledgeBaseMock.mockReturnValue({ status: { value: { - ready: false, + kbState: KnowledgeBaseState.NOT_INSTALLED, enabled: true, }, loading: true, }, isInstalling: false, + isPolling: false, install: jest.fn(), }); }); @@ -78,7 +86,7 @@ describe('KnowledgeBaseTab', () => { useKnowledgeBaseMock.mockReturnValue({ status: { value: { - ready: false, + kbState: KnowledgeBaseState.NOT_INSTALLED, enabled: true, }, loading: false, @@ -101,7 +109,7 @@ describe('KnowledgeBaseTab', () => { useKnowledgeBaseMock.mockReturnValue({ status: { value: { - ready: true, + kbState: KnowledgeBaseState.READY, enabled: true, }, }, @@ -109,6 +117,7 @@ describe('KnowledgeBaseTab', () => { install: jest.fn(), }); }); + it('should render a table', () => { const { getByTestId } = render(); expect(getByTestId('knowledgeBaseTable')).toBeInTheDocument(); diff --git a/x-pack/solutions/observability/plugins/observability_ai_assistant_management/public/routes/components/knowledge_base_tab.tsx b/x-pack/solutions/observability/plugins/observability_ai_assistant_management/public/routes/components/knowledge_base_tab.tsx index 89c7d78838d63..dbe254c20b971 100644 --- a/x-pack/solutions/observability/plugins/observability_ai_assistant_management/public/routes/components/knowledge_base_tab.tsx +++ b/x-pack/solutions/observability/plugins/observability_ai_assistant_management/public/routes/components/knowledge_base_tab.tsx @@ -21,16 +21,23 @@ import { EuiFlexGroup, EuiFlexItem, EuiIcon, + EuiLink, EuiLoadingSpinner, + EuiPanel, EuiPopover, EuiScreenReaderOnly, EuiSpacer, + EuiText, + useEuiTheme, } from '@elastic/eui'; -import { WelcomeMessageKnowledgeBase } from 
'@kbn/ai-assistant/src/chat/welcome_message_knowledge_base'; import { css } from '@emotion/css'; -import { KnowledgeBaseEntry } from '@kbn/observability-ai-assistant-plugin/public'; +import { + KnowledgeBaseEntry, + KnowledgeBaseState, +} from '@kbn/observability-ai-assistant-plugin/public'; import { useKnowledgeBase } from '@kbn/ai-assistant/src/hooks'; -import { AssistantBeacon } from '@kbn/ai-assistant-icon'; +import { KnowledgeBaseInstallationStatusPanel } from '@kbn/ai-assistant/src/knowledge_base/knowledge_base_installation_status_panel'; +import { SettingUpKnowledgeBase } from '@kbn/ai-assistant/src/knowledge_base/setting_up_knowledge_base'; import { useGetKnowledgeBaseEntries } from '../../hooks/use_get_knowledge_base_entries'; import { categorizeEntries, KnowledgeBaseEntryCategory } from '../../helpers/categorize_entries'; import { KnowledgeBaseEditManualEntryFlyout } from './knowledge_base_edit_manual_entry_flyout'; @@ -47,11 +54,16 @@ const centerMaxWidthClassName = css` text-align: center; `; +const panelClassname = css` + width: 100%; +`; + export function KnowledgeBaseTab() { const { uiSettings } = useKibana().services; const dateFormat = uiSettings.get('dateFormat'); const knowledgeBase = useKnowledgeBase(); + const { euiTheme } = useEuiTheme(); const columns: Array> = [ { @@ -208,7 +220,13 @@ export function KnowledgeBaseTab() { entries = [], isLoading, refetch, - } = useGetKnowledgeBaseEntries({ query, sortBy, sortDirection }); + } = useGetKnowledgeBaseEntries({ + query, + sortBy, + sortDirection, + kbState: knowledgeBase.status.value?.kbState, + }); + const categorizedEntries = categorizeEntries({ entries }); const handleChangeSort = ({ sort }: Criteria) => { @@ -223,7 +241,7 @@ export function KnowledgeBaseTab() { setQuery(e?.currentTarget.value || ''); }; - if (knowledgeBase.status.loading) { + if (knowledgeBase.status.loading && !knowledgeBase.isInstalling) { return ( @@ -233,183 +251,212 @@ export function KnowledgeBaseTab() { ); } - return 
knowledgeBase.status.value?.ready ? ( - <> - - - - - - + if (knowledgeBase.status.value?.kbState === KnowledgeBaseState.READY) { + return ( + <> + + + + + + - - refetch()} - > - {i18n.translate( - 'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.reloadButtonLabel', - { defaultMessage: 'Reload' } - )} - - - - setIsEditUserInstructionFlyoutOpen(true)} - > - {i18n.translate( - 'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.editInstructionsButtonLabel', - { defaultMessage: 'Edit User-specific Prompt' } - )} - - + + refetch()} + > + {i18n.translate( + 'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.reloadButtonLabel', + { defaultMessage: 'Reload' } + )} + + + + setIsEditUserInstructionFlyoutOpen(true)} + > + {i18n.translate( + 'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.editInstructionsButtonLabel', + { defaultMessage: 'Edit User-specific Prompt' } + )} + + - - setIsNewEntryPopoverOpen(false)} - button={ - setIsNewEntryPopoverOpen((prevValue) => !prevValue)} - > - {i18n.translate( - 'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.newEntryButtonLabel', - { - defaultMessage: 'New entry', - } - )} - - } - > - { - setIsNewEntryPopoverOpen(false); - setNewEntryFlyoutType('singleEntry'); - }} - size="s" - > - {i18n.translate( - 'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.singleEntryContextMenuItemLabel', - { defaultMessage: 'Single entry' } - )} - , - { - setIsNewEntryPopoverOpen(false); - setNewEntryFlyoutType('bulkImport'); - }} + + setIsNewEntryPopoverOpen(false)} + button={ + setIsNewEntryPopoverOpen((prevValue) => !prevValue)} > {i18n.translate( - 'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.bulkImportContextMenuItemLabel', - { defaultMessage: 'Bulk import' } + 'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.newEntryButtonLabel', + { + defaultMessage: 'New entry', + } )} - , - ]} - /> - - - - + + } + > + { + setIsNewEntryPopoverOpen(false); + 
setNewEntryFlyoutType('singleEntry'); + }} + size="s" + > + {i18n.translate( + 'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.singleEntryContextMenuItemLabel', + { defaultMessage: 'Single entry' } + )} + , + { + setIsNewEntryPopoverOpen(false); + setNewEntryFlyoutType('bulkImport'); + }} + > + {i18n.translate( + 'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.bulkImportContextMenuItemLabel', + { defaultMessage: 'Bulk import' } + )} + , + ]} + /> + + + + - - - data-test-subj="knowledgeBaseTable" - columns={columns} - items={categorizedEntries} - loading={isLoading} - sorting={{ - sort: { - field: sortBy, - direction: sortDirection, - }, - }} - rowProps={(row) => ({ - onClick: () => setSelectedCategory(row), - })} - onChange={handleChangeSort} + + + data-test-subj="knowledgeBaseTable" + columns={columns} + items={categorizedEntries} + loading={isLoading} + sorting={{ + sort: { + field: sortBy, + direction: sortDirection, + }, + }} + rowProps={(row) => ({ + onClick: () => setSelectedCategory(row), + })} + onChange={handleChangeSort} + /> + + + + {isEditUserInstructionFlyoutOpen ? ( + setIsEditUserInstructionFlyoutOpen(false)} /> - - + ) : null} - {isEditUserInstructionFlyoutOpen ? ( - setIsEditUserInstructionFlyoutOpen(false)} - /> - ) : null} + {newEntryFlyoutType === 'singleEntry' ? ( + setNewEntryFlyoutType(undefined)} /> + ) : null} - {newEntryFlyoutType === 'singleEntry' ? ( - setNewEntryFlyoutType(undefined)} /> - ) : null} + {newEntryFlyoutType === 'bulkImport' ? ( + setNewEntryFlyoutType(undefined)} /> + ) : null} - {newEntryFlyoutType === 'bulkImport' ? ( - setNewEntryFlyoutType(undefined)} /> - ) : null} + {selectedCategory ? ( + selectedCategory.entries.length === 1 && + (selectedCategory.entries[0].role === 'user_entry' || + selectedCategory.entries[0].role === 'assistant_summarization') ? 
( + { + setSelectedCategory(undefined); + refetch(); + }} + /> + ) : ( + setSelectedCategory(undefined)} + /> + ) + ) : null} + + ); + } - {selectedCategory ? ( - selectedCategory.entries.length === 1 && - (selectedCategory.entries[0].role === 'user_entry' || - selectedCategory.entries[0].role === 'assistant_summarization') ? ( - { - setSelectedCategory(undefined); - refetch(); - }} - /> - ) : ( - setSelectedCategory(undefined)} - /> - ) - ) : null} - - ) : ( + return ( - - - + + {i18n.translate('xpack.observabilityAiAssistantManagement.knowledgeBaseTab.description', { + defaultMessage: + 'Knowledge Base is a feature that enables the AI Assistant to recall multiple knowledge sources: documents, organizational resources like runbooks, GitHub issues, and internal documentation. It improves response quality with added context for more tailored assistance. ', + })} + + {i18n.translate( + 'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.learnMoreLink', + { + defaultMessage: 'Learn More', + } + )} + + - - - - + + + {knowledgeBase.isInstalling ? 
( + + ) : ( + + )} + + ); } diff --git a/x-pack/solutions/observability/plugins/observability_ai_assistant_management/tsconfig.json b/x-pack/solutions/observability/plugins/observability_ai_assistant_management/tsconfig.json index ea66c0e418635..e18a5af3c6e6d 100644 --- a/x-pack/solutions/observability/plugins/observability_ai_assistant_management/tsconfig.json +++ b/x-pack/solutions/observability/plugins/observability_ai_assistant_management/tsconfig.json @@ -28,8 +28,7 @@ "@kbn/ai-assistant", "@kbn/core-plugins-server", "@kbn/product-doc-base-plugin", - "@kbn/ai-assistant-icon", - "@kbn/ml-plugin" + "@kbn/ml-plugin", ], "exclude": [ "target/**/*" diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/.gitignore b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/.gitignore deleted file mode 100644 index d555c9d94945b..0000000000000 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -# unzipped snapshot folder -knowledge_base/snapshot_kb_8.10/ \ No newline at end of file diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/chat/chat.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/chat/chat.spec.ts index 4a7772c904df7..0893cef92af26 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/chat/chat.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/chat/chat.spec.ts @@ -70,8 +70,9 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon expect(status).to.be(404); }); - it('returns a 200 if the connector exists', async () => { - void proxy.interceptConversation('Hello from LLM Proxy'); + // Fails on ECH: https://github.com/elastic/kibana/issues/219203 + it.skip('returns a 200 if the connector exists', async () => { + void 
proxy.interceptWithResponse('Hello from LLM Proxy'); const { status } = await observabilityAIAssistantAPIClient.editor({ endpoint: 'POST /internal/observability_ai_assistant/chat', params: { @@ -89,8 +90,9 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon expect(status).to.be(200); }); - it('should forward the system message to the LLM', async () => { - const simulatorPromise = proxy.interceptConversation('Hello from LLM Proxy'); + // Fails on ECH: https://github.com/elastic/kibana/issues/219203 + it.skip('should forward the system message to the LLM', async () => { + const simulatorPromise = proxy.interceptWithResponse('Hello from LLM Proxy'); await observabilityAIAssistantAPIClient.editor({ endpoint: 'POST /internal/observability_ai_assistant/chat', params: { @@ -128,7 +130,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon new Promise((resolve, reject) => { async function runTest() { const chunks = times(NUM_RESPONSES).map((i) => `Part: ${i}\n`); - void proxy.interceptConversation(chunks); + void proxy.interceptWithResponse(chunks); const receivedChunks: Array> = []; diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/complete.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/complete.spec.ts index aeedd874f15db..ef630e7a77160 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/complete.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/complete.spec.ts @@ -20,7 +20,6 @@ import { ObservabilityAIAssistantScreenContextRequest } from '@kbn/observability import { createLlmProxy, LlmProxy, - ToolMessage, } from '../../../../../../observability_ai_assistant_api_integration/common/create_llm_proxy'; import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; import { 
SupertestWithRoleScope } from '../../../../services/role_scoped_supertest'; @@ -55,14 +54,9 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon let proxy: LlmProxy; let connectorId: string; - async function getEvents( - params: { screenContexts?: ObservabilityAIAssistantScreenContextRequest[] }, - title: string, - conversationResponse: string | ToolMessage - ) { - void proxy.interceptTitle(title); - void proxy.interceptConversation(conversationResponse); - + async function getEvents(params: { + screenContexts?: ObservabilityAIAssistantScreenContextRequest[]; + }) { const supertestEditorWithCookieCredentials: SupertestWithRoleScope = await roleScopedSupertest.getSupertestWithRoleScope('editor', { useCookieHeader: true, @@ -82,12 +76,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon await proxy.waitForAllInterceptorsToHaveBeenCalled(); - return String(response.body) - .split('\n') - .map((line) => line.trim()) - .filter(Boolean) - .map((line) => JSON.parse(line) as StreamingChatResponseEvent) - .slice(2); // ignore context request/response, we're testing this elsewhere + return decodeEvents(response.body).slice(2); // ignore context request/response, we're testing this elsewhere } before(async () => { @@ -113,7 +102,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon withInternalHeaders: true, }); - proxy.interceptConversation('Hello!').catch((e) => { + proxy.interceptWithResponse('Hello!').catch((e) => { log.error(`Failed to intercept conversation ${e}`); }); @@ -234,7 +223,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }); it('forwards the system message as the first message in the request to the LLM with message role "system"', async () => { - const simulatorPromise = proxy.interceptConversation('Hello from LLM Proxy'); + const simulatorPromise = proxy.interceptWithResponse('Hello from LLM Proxy'); await 
observabilityAIAssistantAPIClient.editor({ endpoint: 'POST /internal/observability_ai_assistant/chat/complete', params: { @@ -261,12 +250,12 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon let events: StreamingChatResponseEvent[]; before(async () => { - events = await getEvents({}, 'Title for at new conversation', 'Hello again').then( - (_events) => { - return _events.filter( - (event) => event.type !== StreamingChatResponseEventType.BufferFlush - ); - } + void proxy.interceptTitle('Title for a new conversation'); + void proxy.interceptWithResponse('Hello again'); + + const allEvents = await getEvents({}); + events = allEvents.filter( + (event) => event.type !== StreamingChatResponseEventType.BufferFlush ); }); @@ -309,7 +298,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon expect(omit(events[4], 'conversation.id', 'conversation.last_updated')).to.eql({ type: StreamingChatResponseEventType.ConversationCreate, conversation: { - title: 'Title for at new conversation', + title: 'Title for a new conversation', }, }); }); @@ -323,41 +312,32 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon let events: StreamingChatResponseEvent[]; before(async () => { - events = await getEvents( - { - screenContexts: [ - { - actions: [ - { - name: 'my_action', - description: 'My action', - parameters: { - type: 'object', - properties: { - foo: { - type: 'string', - }, + void proxy.interceptTitle('Title for conversation with screen context action'); + void proxy.interceptWithFunctionRequest({ + name: 'my_action', + arguments: () => JSON.stringify({ foo: 'bar' }), + }); + + events = await getEvents({ + screenContexts: [ + { + actions: [ + { + name: 'my_action', + description: 'My action', + parameters: { + type: 'object', + properties: { + foo: { + type: 'string', }, }, }, - ], - }, - ], - }, - 'Title for conversation with screen context action', - { - tool_calls: [ - { - toolCallId: 
'fake-id', - index: 1, - function: { - name: 'my_action', - arguments: JSON.stringify({ foo: 'bar' }), }, - }, - ], - } - ); + ], + }, + ], + }); }); it('closes the stream without persisting the conversation', () => { @@ -407,7 +387,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon throw new Error('Failed to intercept conversation title', e); }); - proxy.interceptConversation('Good night, sir!').catch((e) => { + proxy.interceptWithResponse('Good night, sir!').catch((e) => { throw new Error('Failed to intercept conversation ', e); }); @@ -440,7 +420,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }, }); - proxy.interceptConversation('Good night, sir!').catch((e) => { + proxy.interceptWithResponse('Good night, sir!').catch((e) => { log.error(`Failed to intercept conversation ${e}`); }); @@ -477,9 +457,6 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }); }); - // todo - it.skip('executes a function', async () => {}); - describe('security roles and access privileges', () => { it('should deny access for users without the ai_assistant privilege', async () => { const { status } = await observabilityAIAssistantAPIClient.viewer({ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/alerts.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/alerts.spec.ts index 529e745fd534b..c998a3e25e838 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/alerts.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/alerts.spec.ts @@ -101,7 +101,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }, }); - void proxy.interceptConversation('Hello from LLM Proxy'); + void proxy.interceptWithResponse('Hello from LLM 
Proxy'); const alertsResponseBody = await invokeChatCompleteWithFunctionRequest({ connectorId, diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/context.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/context.spec.ts index 9a85323baa58e..438a7dfe2c9d5 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/context.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/context.spec.ts @@ -22,13 +22,13 @@ import { createLlmProxy, } from '../../../../../../../observability_ai_assistant_api_integration/common/create_llm_proxy'; import type { DeploymentAgnosticFtrProviderContext } from '../../../../../ftr_provider_context'; -import { - addSampleDocsToInternalKb, - clearKnowledgeBase, - deleteKnowledgeBaseModel, - setupKnowledgeBase, -} from '../../utils/knowledge_base'; +import { addSampleDocsToInternalKb, clearKnowledgeBase } from '../../utils/knowledge_base'; import { chatComplete } from '../../utils/conversation'; +import { + deployTinyElserAndSetupKb, + teardownTinyElserModelAndInferenceEndpoint, +} from '../../utils/model_and_inference'; +import { restoreIndexAssets } from '../../utils/index_assets'; const screenContexts = [ { @@ -84,12 +84,13 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({ port: llmProxy.getPort(), }); - await setupKnowledgeBase(getService); + await restoreIndexAssets(observabilityAIAssistantAPIClient, es); + await deployTinyElserAndSetupKb(getService); await addSampleDocsToInternalKb(getService, sampleDocsForInternalKb); ({ getDocuments } = llmProxy.interceptScoreToolChoice(log)); - void llmProxy.interceptConversation('Your favourite color is blue.'); + void llmProxy.interceptWithResponse('Your 
favourite color is blue.'); ({ messageAddedEvents } = await chatComplete({ userPrompt, @@ -107,10 +108,14 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon actionId: connectorId, }); - await deleteKnowledgeBaseModel(getService); + await teardownTinyElserModelAndInferenceEndpoint(getService); await clearKnowledgeBase(es); }); + afterEach(async () => { + llmProxy.clear(); + }); + describe('calling the context function via /chat/complete', () => { let firstRequestBody: ChatCompletionStreamParams; let secondRequestBody: ChatCompletionStreamParams; @@ -154,7 +159,6 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon const extractedDocs = await getDocuments(); const expectedTexts = sampleDocsForInternalKb.map((doc) => doc.text).sort(); const actualTexts = extractedDocs.map((doc) => doc.text).sort(); - expect(actualTexts).to.eql(expectedTexts); }); }); diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/elasticsearch.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/elasticsearch.spec.ts index e5ff04589ad78..5bbbc8056fd8e 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/elasticsearch.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/elasticsearch.spec.ts @@ -41,7 +41,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }); // intercept the LLM request and return a fixed response - void proxy.interceptConversation('Hello from LLM Proxy'); + void proxy.interceptWithResponse('Hello from LLM Proxy'); await generateApmData(apmSynthtraceEsClient); diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/execute_query.spec.ts 
b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/execute_query.spec.ts index abc402655ac61..df585c9693730 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/execute_query.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/execute_query.spec.ts @@ -43,6 +43,10 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }); }); + afterEach(async () => { + llmProxy.clear(); + }); + // Calling `execute_query` via the chat/complete endpoint describe('POST /internal/observability_ai_assistant/chat/complete', function () { let messageAddedEvents: MessageAddEvent[]; @@ -62,7 +66,6 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon void llmProxy.interceptWithFunctionRequest({ name: 'query', arguments: () => JSON.stringify({}), - when: () => true, }); void llmProxy.interceptWithFunctionRequest({ @@ -81,10 +84,9 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon | SORT @timestamp DESC | LIMIT 10`, }), - when: () => true, }); - void llmProxy.interceptConversation({ content: 'Hello from user' }); + void llmProxy.interceptWithResponse('Hello from user'); ({ messageAddedEvents } = await chatComplete({ userPrompt: 'Please retrieve the most recent Apache log messages', @@ -103,6 +105,10 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon await logsSynthtraceEsClient.clean(); }); + afterEach(async () => { + llmProxy.clear(); + }); + it('makes 4 requests to the LLM', () => { expect(llmProxy.interceptedRequests.length).to.be(4); }); diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/get_alerts_dataset_info.spec.ts 
b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/get_alerts_dataset_info.spec.ts index 3b014c469f4e8..7f3f67b72dd3a 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/get_alerts_dataset_info.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/get_alerts_dataset_info.spec.ts @@ -73,7 +73,6 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon void llmProxy.interceptWithFunctionRequest({ name: 'get_alerts_dataset_info', arguments: () => JSON.stringify({ start: 'now-10d', end: 'now' }), - when: () => true, }); ({ getRelevantFields } = llmProxy.interceptSelectRelevantFieldsToolChoice()); @@ -81,10 +80,9 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon void llmProxy.interceptWithFunctionRequest({ name: 'alerts', arguments: () => JSON.stringify({ start: 'now-10d', end: 'now' }), - when: () => true, }); - void llmProxy.interceptConversation( + void llmProxy.interceptWithResponse( `You have active alerts for the past 10 days. 
Back to work!` ); @@ -114,6 +112,10 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon await samlAuth.invalidateM2mApiKeyWithRoleScope(roleAuthc); }); + afterEach(async () => { + llmProxy.clear(); + }); + describe('POST /internal/observability_ai_assistant/chat/complete', () => { let firstRequestBody: ChatCompletionStreamParams; let secondRequestBody: ChatCompletionStreamParams; diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/get_dataset_info.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/get_dataset_info.spec.ts index c778c14aa30e7..2999760b58b8d 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/get_dataset_info.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/get_dataset_info.spec.ts @@ -44,6 +44,10 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }); }); + afterEach(async () => { + llmProxy.clear(); + }); + // Calling `get_dataset_info` via the chat/complete endpoint describe('POST /internal/observability_ai_assistant/chat/complete', function () { let messageAddedEvents: MessageAddEvent[]; @@ -62,12 +66,11 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon void llmProxy.interceptWithFunctionRequest({ name: 'get_dataset_info', arguments: () => JSON.stringify({ index: 'logs*' }), - when: () => true, }); ({ getRelevantFields } = llmProxy.interceptSelectRelevantFieldsToolChoice()); - void llmProxy.interceptConversation(`Yes, you do have logs. Congratulations! 🎈️🎈️🎈️`); + void llmProxy.interceptWithResponse(`Yes, you do have logs. Congratulations! 
🎈️🎈️🎈️`); ({ messageAddedEvents } = await chatComplete({ userPrompt: USER_MESSAGE, @@ -86,6 +89,10 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon await logsSynthtraceEsClient.clean(); }); + afterEach(async () => { + llmProxy.clear(); + }); + it('makes 3 requests to the LLM', () => { expect(llmProxy.interceptedRequests.length).to.be(3); }); diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/recall.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/recall.spec.ts index c3f4e4607ca71..db2218a3a76cb 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/recall.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/recall.spec.ts @@ -10,12 +10,14 @@ import { first, uniq } from 'lodash'; import type { DeploymentAgnosticFtrProviderContext } from '../../../../../ftr_provider_context'; import { clearKnowledgeBase, - deleteKnowledgeBaseModel, addSampleDocsToInternalKb, addSampleDocsToCustomIndex, - setupKnowledgeBase, } from '../../utils/knowledge_base'; import { animalSampleDocs, technicalSampleDocs } from '../../utils/sample_docs'; +import { + deployTinyElserAndSetupKb, + teardownTinyElserModelAndInferenceEndpoint, +} from '../../utils/model_and_inference'; const customSearchConnectorIndex = 'animals_kb'; @@ -25,13 +27,13 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon describe('recall', function () { before(async () => { - await setupKnowledgeBase(getService); + await deployTinyElserAndSetupKb(getService); await addSampleDocsToInternalKb(getService, technicalSampleDocs); await addSampleDocsToCustomIndex(getService, animalSampleDocs, customSearchConnectorIndex); }); after(async () => { - await deleteKnowledgeBaseModel(getService); + await 
teardownTinyElserModelAndInferenceEndpoint(getService); await clearKnowledgeBase(es); // clear custom index await es.indices.delete({ index: customSearchConnectorIndex }, { ignore: [404] }); diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/retrieve_elastic_doc.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/retrieve_elastic_doc.spec.ts index 8d2b7e5da91df..2447bb81b4e4b 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/retrieve_elastic_doc.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/retrieve_elastic_doc.spec.ts @@ -38,7 +38,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({ port: llmProxy.getPort(), }); - void llmProxy.interceptConversation('Hello from LLM Proxy'); + void llmProxy.interceptWithResponse('Hello from LLM Proxy'); await chatComplete({ userPrompt: USER_PROMPT, @@ -56,6 +56,10 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }); }); + afterEach(async () => { + llmProxy.clear(); + }); + it('makes 1 requests to the LLM', () => { expect(llmProxy.interceptedRequests.length).to.be(1); }); @@ -101,7 +105,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon when: () => true, }); - void llmProxy.interceptConversation('Hello from LLM Proxy'); + void llmProxy.interceptWithResponse('Hello from LLM Proxy'); ({ messageAddedEvents } = await chatComplete({ userPrompt: USER_PROMPT, diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/summarize.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/summarize.spec.ts index 
d074832e26c4c..cdc0b6d6cf76b 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/summarize.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/summarize.spec.ts @@ -14,10 +14,10 @@ import { import type { DeploymentAgnosticFtrProviderContext } from '../../../../../ftr_provider_context'; import { invokeChatCompleteWithFunctionRequest } from '../../utils/conversation'; import { - clearKnowledgeBase, - deleteKnowledgeBaseModel, - setupKnowledgeBase, -} from '../../utils/knowledge_base'; + deployTinyElserAndSetupKb, + teardownTinyElserModelAndInferenceEndpoint, +} from '../../utils/model_and_inference'; +import { clearKnowledgeBase, getKnowledgeBaseEntriesFromApi } from '../../utils/knowledge_base'; export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { const log = getService('log'); @@ -31,15 +31,15 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon let connectorId: string; before(async () => { - await setupKnowledgeBase(getService); - + await deployTinyElserAndSetupKb(getService); proxy = await createLlmProxy(log); + connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({ port: proxy.getPort(), }); // intercept the LLM request and return a fixed response - void proxy.interceptConversation('Hello from LLM Proxy'); + void proxy.interceptWithResponse('Hello from LLM Proxy'); await invokeChatCompleteWithFunctionRequest({ connectorId, @@ -61,26 +61,17 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }); after(async () => { - proxy.close(); + proxy?.close(); await observabilityAIAssistantAPIClient.deleteActionConnector({ actionId: connectorId, }); - await deleteKnowledgeBaseModel(getService); + await teardownTinyElserModelAndInferenceEndpoint(getService); await clearKnowledgeBase(es); }); it('persists entry in knowledge base', 
async () => { - const res = await observabilityAIAssistantAPIClient.editor({ - endpoint: 'GET /internal/observability_ai_assistant/kb/entries', - params: { - query: { - query: '', - sortBy: 'title', - sortDirection: 'asc', - }, - }, - }); + const res = await getKnowledgeBaseEntriesFromApi({ observabilityAIAssistantAPIClient }); const { role, public: isPublic, text, type, user, title } = res.body.entries[0]; diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/title_conversation.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/title_conversation.spec.ts index 3c8f0a9411e32..597d28d6696cc 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/title_conversation.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/title_conversation.spec.ts @@ -54,7 +54,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon before(async () => { await clearConversations(es); const simulatorPromise = llmProxy.interceptTitle(TITLE); - void llmProxy.interceptConversation('The sky is blue because of Rayleigh scattering.'); + void llmProxy.interceptWithResponse('The sky is blue because of Rayleigh scattering.'); const res = await chatComplete({ userPrompt: 'Why the sky is blue?', diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/distributed_lock_manager/distributed_lock_manager.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/distributed_lock_manager/distributed_lock_manager.spec.ts index c0a511d4eb3eb..ef6a9c26cb29b 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/distributed_lock_manager/distributed_lock_manager.spec.ts +++ 
b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/distributed_lock_manager/distributed_lock_manager.spec.ts @@ -28,7 +28,7 @@ import { } from '@kbn/lock-manager/src/setup_lock_manager_index'; import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; -import { getLoggerMock } from '../utils/logger'; +import { getLoggerMock } from '../utils/kibana_mocks'; import { dateAsTimestamp, durationAsMs, sleep } from '../utils/time'; export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/index.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/index.ts index d7f318d9244b7..46f7e22bf7698 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/index.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/index.ts @@ -11,36 +11,40 @@ export default function aiAssistantApiIntegrationTests({ loadTestFile, }: DeploymentAgnosticFtrProviderContext) { describe('observability AI Assistant', function () { - loadTestFile(require.resolve('./conversations/conversations.spec.ts')); - loadTestFile(require.resolve('./connectors/connectors.spec.ts')); - loadTestFile(require.resolve('./chat/chat.spec.ts')); - loadTestFile(require.resolve('./complete/complete.spec.ts')); + // Functions loadTestFile(require.resolve('./complete/functions/alerts.spec.ts')); + loadTestFile(require.resolve('./complete/functions/context.spec.ts')); + loadTestFile(require.resolve('./complete/functions/elasticsearch.spec.ts')); + loadTestFile(require.resolve('./complete/functions/execute_query.spec.ts')); loadTestFile(require.resolve('./complete/functions/get_alerts_dataset_info.spec.ts')); loadTestFile(require.resolve('./complete/functions/get_dataset_info.spec.ts')); - 
loadTestFile(require.resolve('./complete/functions/execute_query.spec.ts')); - loadTestFile(require.resolve('./complete/functions/elasticsearch.spec.ts')); + loadTestFile(require.resolve('./complete/functions/recall.spec.ts')); loadTestFile(require.resolve('./complete/functions/retrieve_elastic_doc.spec.ts')); loadTestFile(require.resolve('./complete/functions/summarize.spec.ts')); - loadTestFile(require.resolve('./complete/functions/recall.spec.ts')); - loadTestFile(require.resolve('./complete/functions/context.spec.ts')); loadTestFile(require.resolve('./complete/functions/title_conversation.spec.ts')); - loadTestFile(require.resolve('./public_complete/public_complete.spec.ts')); - loadTestFile(require.resolve('./knowledge_base/knowledge_base_setup.spec.ts')); - loadTestFile( - require.resolve( - './knowledge_base/knowledge_base_reindex_and_populate_missing_semantic_text_fields.spec.ts' - ) - ); - loadTestFile( - require.resolve( - './knowledge_base/knowledge_base_reindex_to_fix_sparse_vector_support.spec.ts' - ) - ); + + // knowledge base + loadTestFile(require.resolve('./knowledge_base/knowledge_base_8.10_upgrade_test.spec.ts')); + loadTestFile(require.resolve('./knowledge_base/knowledge_base_8.16_upgrade_test.spec.ts')); + loadTestFile(require.resolve('./knowledge_base/knowledge_base_8.18_upgrade_test.spec.ts')); loadTestFile(require.resolve('./knowledge_base/knowledge_base_reindex_concurrency.spec.ts')); + loadTestFile(require.resolve('./knowledge_base/knowledge_base_setup.spec.ts')); loadTestFile(require.resolve('./knowledge_base/knowledge_base_status.spec.ts')); - loadTestFile(require.resolve('./knowledge_base/knowledge_base.spec.ts')); loadTestFile(require.resolve('./knowledge_base/knowledge_base_user_instructions.spec.ts')); + loadTestFile(require.resolve('./knowledge_base/knowledge_base.spec.ts')); + loadTestFile( + require.resolve('./knowledge_base/knowledge_base_change_model_from_elser_to_e5.spec.ts') + ); + + // Misc. 
+ loadTestFile(require.resolve('./chat/chat.spec.ts')); + loadTestFile(require.resolve('./complete/complete.spec.ts')); + loadTestFile(require.resolve('./index_assets/index_assets.spec.ts')); + loadTestFile(require.resolve('./connectors/connectors.spec.ts')); + loadTestFile(require.resolve('./conversations/conversations.spec.ts')); + + // public endpoints + loadTestFile(require.resolve('./public_complete/public_complete.spec.ts')); loadTestFile(require.resolve('./distributed_lock_manager/distributed_lock_manager.spec.ts')); }); } diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/index_assets/index_assets.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/index_assets/index_assets.spec.ts index b70ee2aad033f..1f3c6a3b0d955 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/index_assets/index_assets.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/index_assets/index_assets.spec.ts @@ -33,7 +33,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }); } - for (const writeIndexName of Object.values(resourceNames.concreteIndexName)) { + for (const writeIndexName of Object.values(resourceNames.concreteWriteIndexName)) { it(`should create write index: "${writeIndexName}"`, async () => { const exists = await es.indices.exists({ index: writeIndexName }); expect(exists).to.be(true); @@ -54,7 +54,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon expect(indices).to.have.length(2); expect(indices.map(({ index }) => index).sort()).to.eql( - Object.values(resourceNames.concreteIndexName).sort() + Object.values(resourceNames.concreteWriteIndexName).sort() ); }); }); diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base.spec.ts 
b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base.spec.ts index 689e66fe984ea..18c548971b8fd 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base.spec.ts @@ -7,12 +7,13 @@ import expect from '@kbn/expect'; import { type KnowledgeBaseEntry } from '@kbn/observability-ai-assistant-plugin/common'; +import { orderBy, size, toPairs } from 'lodash'; import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; +import { clearKnowledgeBase, getKnowledgeBaseEntriesFromEs } from '../utils/knowledge_base'; import { - clearKnowledgeBase, - deleteKnowledgeBaseModel, - setupKnowledgeBase, -} from '../utils/knowledge_base'; + teardownTinyElserModelAndInferenceEndpoint, + deployTinyElserAndSetupKb, +} from '../utils/model_and_inference'; export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { const es = getService('es'); @@ -40,16 +41,16 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }); expect(res.status).to.be(200); - return omitCategories(res.body.entries); + return res.body.entries; } describe('Knowledge base', function () { before(async () => { - await setupKnowledgeBase(getService); + await deployTinyElserAndSetupKb(getService); }); after(async () => { - await deleteKnowledgeBaseModel(getService); + await teardownTinyElserModelAndInferenceEndpoint(getService); await clearKnowledgeBase(es); }); @@ -59,48 +60,42 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon title: 'My title', text: 'My content', }; - it('returns 200 on create', async () => { + + before(async () => { const { status } = await observabilityAIAssistantAPIClient.editor({ endpoint: 'POST 
/internal/observability_ai_assistant/kb/entries/save', params: { body: knowledgeBaseEntry }, }); expect(status).to.be(200); - const res = await observabilityAIAssistantAPIClient.editor({ - endpoint: 'GET /internal/observability_ai_assistant/kb/entries', - params: { - query: { - query: '', - sortBy: 'title', - sortDirection: 'asc', - }, - }, - }); - const entry = res.body.entries[0]; - expect(entry.id).to.equal(knowledgeBaseEntry.id); - expect(entry.title).to.equal(knowledgeBaseEntry.title); - expect(entry.text).to.equal(knowledgeBaseEntry.text); }); - it('returns 200 on get entries and entry exists', async () => { - const res = await observabilityAIAssistantAPIClient.editor({ - endpoint: 'GET /internal/observability_ai_assistant/kb/entries', - params: { - query: { - query: '', - sortBy: 'title', - sortDirection: 'asc', - }, - }, - }); - - expect(res.status).to.be(200); - const entry = res.body.entries[0]; + it('can retrieve the entry', async () => { + const entries = await getEntries(); + const entry = entries[0]; expect(entry.id).to.equal(knowledgeBaseEntry.id); expect(entry.title).to.equal(knowledgeBaseEntry.title); expect(entry.text).to.equal(knowledgeBaseEntry.text); }); - it('returns 200 on delete', async () => { + it('generates sparse embeddings', async () => { + const hits = await getKnowledgeBaseEntriesFromEs(es); + const embeddings = + hits[0]._source?._inference_fields?.semantic_text?.inference.chunks.semantic_text[0] + .embeddings; + + const sorted = orderBy(toPairs(embeddings), [1], ['desc']).slice(0, 5); + + expect(size(embeddings)).to.be.greaterThan(10); + expect(sorted).to.eql([ + ['temperature', 0.07421875], + ['used', 0.068359375], + ['definition', 0.03955078], + ['only', 0.038208008], + ['what', 0.028930664], + ]); + }); + + it('can delete the entry', async () => { const entryId = 'my-doc-id-1'; const { status } = await observabilityAIAssistantAPIClient.editor({ endpoint: 'DELETE /internal/observability_ai_assistant/kb/entries/{entryId}', @@ 
-110,21 +105,8 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }); expect(status).to.be(200); - const res = await observabilityAIAssistantAPIClient.editor({ - endpoint: 'GET /internal/observability_ai_assistant/kb/entries', - params: { - query: { - query: '', - sortBy: 'title', - sortDirection: 'asc', - }, - }, - }); - - expect(res.status).to.be(200); - expect(res.body.entries.filter((entry) => entry.id.startsWith('my-doc-id')).length).to.eql( - 0 - ); + const entries = await getEntries(); + expect(entries.length).to.eql(0); }); it('returns 500 on delete not found', async () => { @@ -174,22 +156,20 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon await clearKnowledgeBase(es); }); - it('returns 200 on create', async () => { + it('creates multiple entries', async () => { const entries = await getEntries(); - expect(omitCategories(entries).length).to.eql(3); + expect(entries.length).to.eql(3); }); describe('when sorting ', () => { - const ascendingOrder = ['my_doc_a', 'my_doc_b', 'my_doc_c']; - it('allows sorting ascending', async () => { const entries = await getEntries({ sortBy: 'title', sortDirection: 'asc' }); - expect(entries.map(({ id }) => id)).to.eql(ascendingOrder); + expect(entries.map(({ id }) => id)).to.eql(['my_doc_a', 'my_doc_b', 'my_doc_c']); }); it('allows sorting descending', async () => { const entries = await getEntries({ sortBy: 'title', sortDirection: 'desc' }); - expect(entries.map(({ id }) => id)).to.eql([...ascendingOrder].reverse()); + expect(entries.map(({ id }) => id)).to.eql(['my_doc_c', 'my_doc_b', 'my_doc_a']); }); }); @@ -352,7 +332,3 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }); }); } - -function omitCategories(entries: KnowledgeBaseEntry[]) { - return entries.filter((entry) => entry.labels?.category === undefined); -} diff --git 
a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_8.10_upgrade_test.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_8.10_upgrade_test.spec.ts new file mode 100644 index 0000000000000..534633fa40b13 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_8.10_upgrade_test.spec.ts @@ -0,0 +1,160 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import expect from '@kbn/expect'; +import * as semver from 'semver'; +import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/common'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; +import { getKbIndexCreatedVersion } from '../utils/knowledge_base'; +import { + TINY_ELSER_INFERENCE_ID, + TINY_ELSER_MODEL_ID, + setupTinyElserModelAndInferenceEndpoint, + teardownTinyElserModelAndInferenceEndpoint, +} from '../utils/model_and_inference'; +import { + createOrUpdateIndexAssets, + deleteIndexAssets, + restoreIndexAssets, + runStartupMigrations, +} from '../utils/index_assets'; +import { restoreKbSnapshot } from '../utils/snapshots'; + +export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + const es = getService('es'); + const retry = getService('retry'); + const log = getService('log'); + + // Sparse vector field was introduced in Elasticsearch 8.11 + // The semantic text field was added to the knowledge base index in 8.17 + // Indices created in 8.10 do not support semantic text field and need to be reindexed + describe('when 
upgrading from 8.10 to 8.18', function () { + // Intentionally skipped in all serverless environments (local and MKI) + // because the migration scenario being tested is not relevant to MKI and Serverless. + this.tags(['skipServerless']); + + before(async () => { + // in a real environment we will use the ELSER inference endpoint (`.elser-2-elasticsearch`) which is pre-installed + // For testing purposes we will use the tiny ELSER model + + log.info('Setting up tiny ELSER model and inference endpoint'); + await setupTinyElserModelAndInferenceEndpoint(getService); + }); + + after(async () => { + log.info('Restoring index assets'); + await restoreIndexAssets(observabilityAIAssistantAPIClient, es); + + log.info('Tearing down tiny ELSER model and inference endpoint'); + await teardownTinyElserModelAndInferenceEndpoint(getService); + }); + + describe('before running migrations', () => { + before(async () => { + log.info('Delete index assets'); + await deleteIndexAssets(es); + + log.info('Restoring snapshot'); + await restoreKbSnapshot({ + log, + es, + snapshotFolderName: 'snapshot_kb_8.10', + snapshotName: 'my_snapshot', + }); + + log.info('Creating index assets'); + await createOrUpdateIndexAssets(observabilityAIAssistantAPIClient); + }); + + it('has an index created version earlier than 8.11', async () => { + await retry.try(async () => { + const indexCreatedVersion = await getKbIndexCreatedVersion(es); + expect(semver.lt(indexCreatedVersion, '8.11.0')).to.be(true); + }); + }); + + it('cannot add new entries to KB until reindex has completed', async () => { + const res1 = await createKnowledgeBaseEntry(); + + expect(res1.status).to.be(503); + expect((res1.body as unknown as Error).message).to.eql( + 'The index ".kibana-observability-ai-assistant-kb" does not support semantic text and must be reindexed. This re-index operation has been scheduled and will be started automatically. Please try again later.'
+ ); + + // wait for reindex to have updated the index + await retry.try(async () => { + const indexCreatedVersion = await getKbIndexCreatedVersion(es); + expect(semver.gte(indexCreatedVersion, '8.18.0')).to.be(true); + }); + + const res2 = await createKnowledgeBaseEntry(); + expect(res2.status).to.be(200); + }); + }); + + describe('after running migrations', () => { + beforeEach(async () => { + await deleteIndexAssets(es); + await restoreKbSnapshot({ + log, + es, + snapshotFolderName: 'snapshot_kb_8.10', + snapshotName: 'my_snapshot', + }); + await createOrUpdateIndexAssets(observabilityAIAssistantAPIClient); + await runStartupMigrations(observabilityAIAssistantAPIClient); + }); + + it('has an index created version later than 8.18', async () => { + await retry.try(async () => { + const indexCreatedVersion = await getKbIndexCreatedVersion(es); + expect(semver.gt(indexCreatedVersion, '8.18.0')).to.be(true); + }); + }); + + it('can add new entries', async () => { + const { status } = await createKnowledgeBaseEntry(); + expect(status).to.be(200); + }); + + it('has default ELSER inference endpoint', async () => { + await retry.try(async () => { + const { body } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/status', + }); + + expect(body.endpoint?.inference_id).to.eql(TINY_ELSER_INFERENCE_ID); + expect(body.endpoint?.service_settings.model_id).to.eql(TINY_ELSER_MODEL_ID); + }); + }); + + it('have a deployed model', async () => { + await retry.try(async () => { + const { body } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/status', + }); + + expect(body.kbState === KnowledgeBaseState.READY).to.be(true); + }); + }); + }); + + function createKnowledgeBaseEntry() { + const knowledgeBaseEntry = { + id: 'my-doc-id-1', + title: 'My title', + text: 'My content', + }; + + return observabilityAIAssistantAPIClient.editor({ + endpoint: 'POST 
/internal/observability_ai_assistant/kb/entries/save', + params: { body: knowledgeBaseEntry }, + }); + } + }); +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_8.16_upgrade_test.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_8.16_upgrade_test.spec.ts new file mode 100644 index 0000000000000..498fe75f6d435 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_8.16_upgrade_test.spec.ts @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import expect from '@kbn/expect'; +import { KnowledgeBaseEntry } from '@kbn/observability-ai-assistant-plugin/common'; +import { sortBy } from 'lodash'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; +import { + getKnowledgeBaseEntriesFromEs, + getKnowledgeBaseEntriesFromApi, +} from '../utils/knowledge_base'; +import { + createOrUpdateIndexAssets, + deleteIndexAssets, + restoreIndexAssets, + runStartupMigrations, +} from '../utils/index_assets'; +import { restoreKbSnapshot } from '../utils/snapshots'; +import { + deployTinyElserAndSetupKb, + teardownTinyElserModelAndInferenceEndpoint, +} from '../utils/model_and_inference'; + +export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + const es = getService('es'); + const retry = getService('retry'); + const log = getService('log'); + + // In 8.16 and earlier embeddings were stored in the `ml.tokens` field + // In 8.17 `ml.tokens` is replaced with `semantic_text` field and the 
custom ELSER inference endpoint "obs_ai_assistant_kb_inference" is introduced + // When upgrading we must ensure that the semantic_text field is populated + describe('when upgrading from 8.16 to 8.17', function () { + // Intentionally skipped in all serverless environments (local and MKI) + // because the migration scenario being tested is not relevant to MKI and Serverless. + this.tags(['skipServerless']); + + before(async () => { + await teardownTinyElserModelAndInferenceEndpoint(getService); + await deleteIndexAssets(es); + await restoreKbSnapshot({ + log, + es, + snapshotFolderName: 'snapshot_kb_8.16', + snapshotName: 'kb_snapshot_8.16', + }); + + await createOrUpdateIndexAssets(observabilityAIAssistantAPIClient); + + await deployTinyElserAndSetupKb(getService); + }); + + after(async () => { + await teardownTinyElserModelAndInferenceEndpoint(getService); + await restoreIndexAssets(observabilityAIAssistantAPIClient, es); + }); + + describe('before migrating', () => { + it('the docs do not have semantic_text embeddings', async () => { + const hits = await getKnowledgeBaseEntriesFromEs(es); + const hasSemanticTextEmbeddings = hits.some((hit) => hit._source?.semantic_text); + + expect(hits.length).to.be(60); + expect(hasSemanticTextEmbeddings).to.be(false); + }); + }); + + describe('after migrating', () => { + before(async () => { + await runStartupMigrations(observabilityAIAssistantAPIClient); + }); + + it('the docs have semantic_text field', async () => { + await retry.try(async () => { + const hits = await getKnowledgeBaseEntriesFromEs(es); + const hasSemanticTextField = hits.every((hit) => hit._source?.semantic_text); + + expect(hits.length).to.be(60); + expect(hasSemanticTextField).to.be(true); + }); + }); + + it('the docs have embeddings', async () => { + await retry.try(async () => { + const hits = await getKnowledgeBaseEntriesFromEs(es); + const hasEmbeddings = hits.every( + (hit) => + // @ts-expect-error + 
Object.keys(hit._source?.semantic_text.inference.chunks[0].embeddings).length > 0 + ); + expect(hits.length).to.be(60); + expect(hasEmbeddings).to.be(true); + }); + }); + + it('returns entries correctly via API', async () => { + const res = await getKnowledgeBaseEntriesFromApi({ observabilityAIAssistantAPIClient }); + expect(res.status).to.be(200); + + expect( + sortBy( + res.body.entries + .filter(omitLensEntry) + .map(({ title, text, type }) => ({ title, text, type })), + ({ title }) => title + ) + ).to.eql([ + { + title: 'movie_quote', + type: 'contextual', + text: 'To infinity and beyond!', + }, + { + title: 'user_color', + type: 'contextual', + text: "The user's favourite color is blue.", + }, + ]); + }); + }); + }); +} + +function omitLensEntry(entry?: KnowledgeBaseEntry) { + return entry?.labels?.category !== 'lens'; +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_8.18_upgrade_test.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_8.18_upgrade_test.spec.ts new file mode 100644 index 0000000000000..3ba050327b659 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_8.18_upgrade_test.spec.ts @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import expect from '@kbn/expect'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; +import { getKbIndexCreatedVersion, getKnowledgeBaseEntriesFromApi } from '../utils/knowledge_base'; +import { + createOrUpdateIndexAssets, + deleteIndexAssets, + restoreIndexAssets, + runStartupMigrations, +} from '../utils/index_assets'; +import { restoreKbSnapshot } from '../utils/snapshots'; +import { + LEGACY_CUSTOM_INFERENCE_ID, + TINY_ELSER_MODEL_ID, + createTinyElserInferenceEndpoint, + deleteInferenceEndpoint, + deleteModel, + importModel, +} from '../utils/model_and_inference'; + +export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + const es = getService('es'); + const retry = getService('retry'); + const log = getService('log'); + const ml = getService('ml'); + + // In 8.18 inference happens via the custom inference endpoint "obs_ai_assistant_kb_inference" + // In 8.19 / 9.1 the custom inference endpoint ("obs_ai_assistant_kb_inference") is replaced with the preconfigured endpoint ".elser-2-elasticsearch" + // We need to make sure that the custom inference endpoint continues to work after the migration + + describe('when upgrading from 8.18 to 8.19', function () { + this.tags(['skipServerless']); + + before(async () => { + await importModel(ml, { modelId: TINY_ELSER_MODEL_ID }); + await createTinyElserInferenceEndpoint(getService, { + inferenceId: LEGACY_CUSTOM_INFERENCE_ID, + }); + + await deleteIndexAssets(es); + await restoreKbSnapshot({ + log, + es, + snapshotFolderName: 'snapshot_kb_8.18', + snapshotName: 'kb_snapshot_8.18', + }); + + await createOrUpdateIndexAssets(observabilityAIAssistantAPIClient); + await runStartupMigrations(observabilityAIAssistantAPIClient); + }); + + after(async () => { + await restoreIndexAssets(observabilityAIAssistantAPIClient, es); + await deleteModel(getService, { 
modelId: TINY_ELSER_MODEL_ID }); + await deleteInferenceEndpoint(getService, { inferenceId: LEGACY_CUSTOM_INFERENCE_ID }); + }); + + it('has an index created in 8.18', async () => { + await retry.try(async () => { + const indexVersion = await getKbIndexCreatedVersion(es); + expect(indexVersion).to.be('8.18.0'); + }); + }); + + it('can retrieve entries', async () => { + const res = await getKnowledgeBaseEntriesFromApi({ observabilityAIAssistantAPIClient }); + expect(res.status).to.be(200); + expect(res.body.entries).to.have.length(1); + expect(res.body.entries[0].text).to.be( + 'The user has a 10 meter tall pet dinosaur. It loves carrots.' + ); + }); + + it('can add new entries to KB', async () => { + const res = await createKnowledgeBaseEntry(); + expect(res.status).to.be(200); + }); + + function createKnowledgeBaseEntry() { + const knowledgeBaseEntry = { + id: 'my-doc-id-1', + title: 'My title', + text: 'My content', + }; + + return observabilityAIAssistantAPIClient.editor({ + endpoint: 'POST /internal/observability_ai_assistant/kb/entries/save', + params: { body: knowledgeBaseEntry }, + }); + } + }); +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_change_model_from_elser_to_e5.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_change_model_from_elser_to_e5.spec.ts new file mode 100644 index 0000000000000..e852a7a79fef1 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_change_model_from_elser_to_e5.spec.ts @@ -0,0 +1,174 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import expect from '@kbn/expect'; +import { getInferenceIdFromWriteIndex } from '@kbn/observability-ai-assistant-plugin/server/service/knowledge_base_service/get_inference_id_from_write_index'; +import { KnowledgeBaseEntry } from '@kbn/observability-ai-assistant-plugin/common'; +import { resourceNames } from '@kbn/observability-ai-assistant-plugin/server/service'; +import { isArray, isObject } from 'lodash'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; +import { + addSampleDocsToInternalKb, + getConcreteWriteIndexFromAlias, + getKnowledgeBaseEntriesFromApi, + getKnowledgeBaseEntriesFromEs, + setupKnowledgeBase, + waitForKnowledgeBaseIndex, + waitForKnowledgeBaseReady, +} from '../utils/knowledge_base'; +import { restoreIndexAssets } from '../utils/index_assets'; +import { + TINY_ELSER_INFERENCE_ID, + TINY_ELSER_MODEL_ID, + TINY_TEXT_EMBEDDING_INFERENCE_ID, + TINY_TEXT_EMBEDDING_MODEL_ID, + createTinyElserInferenceEndpoint, + createTinyTextEmbeddingInferenceEndpoint, + deleteInferenceEndpoint, + deleteModel, + importModel, +} from '../utils/model_and_inference'; +import { animalSampleDocs } from '../utils/sample_docs'; + +export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { + const es = getService('es'); + const ml = getService('ml'); + const log = getService('log'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + + type KnowledgeBaseEsEntry = Awaited>[0]; + + describe('when changing from ELSER to E5-like model', function () { + let elserEntriesFromApi: KnowledgeBaseEntry[]; + let elserEntriesFromEs: KnowledgeBaseEsEntry[]; + let elserInferenceId: string; + let elserWriteIndex: string; + + let e5EntriesFromApi: KnowledgeBaseEntry[]; + let e5EntriesFromEs: KnowledgeBaseEsEntry[]; + let e5InferenceId: string; + let e5WriteIndex: string; + + before(async () => { + await importModel(ml, { modelId: TINY_ELSER_MODEL_ID }); + await 
createTinyElserInferenceEndpoint(getService, { inferenceId: TINY_ELSER_INFERENCE_ID }); + await setupKnowledgeBase(observabilityAIAssistantAPIClient, TINY_ELSER_INFERENCE_ID); + await waitForKnowledgeBaseReady(getService); + + // ingest documents + await addSampleDocsToInternalKb(getService, animalSampleDocs); + + elserEntriesFromApi = ( + await getKnowledgeBaseEntriesFromApi({ observabilityAIAssistantAPIClient }) + ).body.entries; + + elserEntriesFromEs = await getKnowledgeBaseEntriesFromEs(es); + elserInferenceId = await getInferenceIdFromWriteIndex({ asInternalUser: es }); + elserWriteIndex = await getConcreteWriteIndexFromAlias(es); + + // setup KB with E5-like model + await importModel(ml, { modelId: TINY_TEXT_EMBEDDING_MODEL_ID }); + await ml.api.startTrainedModelDeploymentES(TINY_TEXT_EMBEDDING_MODEL_ID); + await createTinyTextEmbeddingInferenceEndpoint(getService, { + inferenceId: TINY_TEXT_EMBEDDING_INFERENCE_ID, + }); + await setupKnowledgeBase(observabilityAIAssistantAPIClient, TINY_TEXT_EMBEDDING_INFERENCE_ID); + + await waitForKnowledgeBaseIndex(getService, '.kibana-observability-ai-assistant-kb-000002'); + await waitForKnowledgeBaseReady(getService); + + e5EntriesFromApi = ( + await getKnowledgeBaseEntriesFromApi({ observabilityAIAssistantAPIClient }) + ).body.entries; + + e5EntriesFromEs = await getKnowledgeBaseEntriesFromEs(es); + e5InferenceId = await getInferenceIdFromWriteIndex({ asInternalUser: es }); + e5WriteIndex = await getConcreteWriteIndexFromAlias(es); + }); + + after(async () => { + // ELSER + await deleteModel(getService, { modelId: TINY_ELSER_MODEL_ID }); + await deleteInferenceEndpoint(getService, { inferenceId: TINY_ELSER_INFERENCE_ID }); + + // E5-like + await deleteModel(getService, { modelId: TINY_TEXT_EMBEDDING_MODEL_ID }); + await deleteInferenceEndpoint(getService, { inferenceId: TINY_TEXT_EMBEDDING_INFERENCE_ID }); + + await restoreIndexAssets(observabilityAIAssistantAPIClient, es); + }); + + describe('when model is ELSER', () 
=> { + it('has correct write index name', async () => { + expect(elserWriteIndex).to.be(`${resourceNames.writeIndexAlias.kb}-000001`); + }); + + it('has correct number of entries', async () => { + expect(elserEntriesFromApi).to.have.length(5); + expect(elserEntriesFromEs).to.have.length(5); + }); + + it('has correct ELSER inference id', async () => { + expect(elserInferenceId).to.be(TINY_ELSER_INFERENCE_ID); + }); + + it('has sparse embeddings', async () => { + const embeddings = getEmbeddings(e5EntriesFromEs); + + const hasSparseEmbeddings = embeddings.every((embedding) => { + return ( + isObject(embedding) && + Object.values(embedding).every((value) => typeof value === 'number') + ); + }); + + if (!hasSparseEmbeddings) { + log.warning('Must be sparse embeddings. Found:', JSON.stringify(embeddings, null, 2)); + } + + expect(hasSparseEmbeddings).to.be(true); + }); + }); + + describe('when model is changed to E5', () => { + it('has increments the index name', async () => { + expect(e5WriteIndex).to.be(`${resourceNames.writeIndexAlias.kb}-000002`); + }); + + it('returns the same entries from the API', async () => { + expect(e5EntriesFromApi).to.eql(elserEntriesFromApi); + }); + + it('has updates the inference id', async () => { + expect(e5InferenceId).to.be(TINY_TEXT_EMBEDDING_INFERENCE_ID); + }); + + it('has dense embeddings', async () => { + const embeddings = getEmbeddings(e5EntriesFromEs); + + // dense embeddings are modelled as arrays of numbers + const hasDenseEmbeddings = embeddings.every((embedding) => { + return isArray(embedding) && embedding.every((value) => typeof value === 'number'); + }); + + if (!hasDenseEmbeddings) { + log.warning('Must be dense embeddings. 
Found:', JSON.stringify(embeddings, null, 2)); + } + + expect(hasDenseEmbeddings).to.be(true); + }); + }); + + function getEmbeddings(hits: KnowledgeBaseEsEntry[]) { + return hits.flatMap((hit) => { + return hit._source!._inference_fields.semantic_text.inference.chunks.semantic_text.map( + (chunk) => chunk.embeddings + ); + }); + } + }); +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_reindex_and_populate_missing_semantic_text_fields.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_reindex_and_populate_missing_semantic_text_fields.spec.ts deleted file mode 100644 index 94c0fcccef5fa..0000000000000 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_reindex_and_populate_missing_semantic_text_fields.spec.ts +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import { orderBy } from 'lodash'; -import expect from '@kbn/expect'; -import { AI_ASSISTANT_KB_INFERENCE_ID } from '@kbn/observability-ai-assistant-plugin/server/service/inference_endpoint'; -import { SearchResponse } from '@elastic/elasticsearch/lib/api/typesWithBodyKey'; -import { KnowledgeBaseEntry } from '@kbn/observability-ai-assistant-plugin/common'; -import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; -import { - deleteKnowledgeBaseModel, - clearKnowledgeBase, - setupKnowledgeBase, -} from '../utils/knowledge_base'; -import { restoreIndexAssets } from '../utils/index_assets'; - -interface InferenceChunk { - text: string; - embeddings: any; -} - -interface InferenceData { - inference_id: string; - chunks: { - semantic_text: InferenceChunk[]; - }; -} - -interface SemanticTextField { - semantic_text: string; - _inference_fields?: { - semantic_text?: { - inference: InferenceData; - }; - }; -} - -export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { - const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); - const esArchiver = getService('esArchiver'); - const es = getService('es'); - const retry = getService('retry'); - - const archive = - 'x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15'; - - async function getKnowledgeBaseEntries() { - const res = (await es.search({ - index: '.kibana-observability-ai-assistant-kb*', - // Add fields parameter to include inference metadata - fields: ['_inference_fields'], - body: { - query: { - match_all: {}, - }, - }, - })) as SearchResponse; - - return res.hits.hits; - } - - describe('when the knowledge base index was created before 8.15', function () { - // Intentionally skipped in all serverless environnments (local and MKI) - // because the migration scenario being tested is not relevant to MKI and Serverless. 
- this.tags(['skipServerless']); - - before(async () => { - await deleteKnowledgeBaseModel(getService); - await restoreIndexAssets(observabilityAIAssistantAPIClient, es); - await clearKnowledgeBase(es); - await esArchiver.load(archive); - await setupKnowledgeBase(getService); - }); - - after(async () => { - await deleteKnowledgeBaseModel(getService); - await restoreIndexAssets(observabilityAIAssistantAPIClient, es); - }); - - describe('before migrating', () => { - it('the docs do not have semantic_text embeddings', async () => { - const hits = await getKnowledgeBaseEntries(); - const hasSemanticTextEmbeddings = hits.some((hit) => hit._source?.semantic_text); - expect(hasSemanticTextEmbeddings).to.be(false); - }); - }); - - describe('after migrating', () => { - before(async () => { - const { status } = await observabilityAIAssistantAPIClient.editor({ - endpoint: - 'POST /internal/observability_ai_assistant/kb/migrations/populate_missing_semantic_text_field', - }); - expect(status).to.be(200); - }); - - it('the docs have semantic_text embeddings', async () => { - await retry.try(async () => { - const hits = await getKnowledgeBaseEntries(); - const hasSemanticTextEmbeddings = hits.every((hit) => hit._source?.semantic_text); - expect(hasSemanticTextEmbeddings).to.be(true); - - expect( - orderBy(hits, '_source.title').map(({ _source }) => { - const text = _source?.semantic_text; - const inference = _source?._inference_fields?.semantic_text?.inference; - - return { - text: text ?? 
'', - inferenceId: inference?.inference_id, - chunkCount: inference?.chunks?.semantic_text?.length, - }; - }) - ).to.eql([ - { - text: 'To infinity and beyond!', - inferenceId: AI_ASSISTANT_KB_INFERENCE_ID, - chunkCount: 1, - }, - { - text: "The user's favourite color is blue.", - inferenceId: AI_ASSISTANT_KB_INFERENCE_ID, - chunkCount: 1, - }, - ]); - }); - }); - - it('returns entries correctly via API', async () => { - const { status } = await observabilityAIAssistantAPIClient.editor({ - endpoint: - 'POST /internal/observability_ai_assistant/kb/migrations/populate_missing_semantic_text_field', - }); - - expect(status).to.be(200); - - const res = await observabilityAIAssistantAPIClient.editor({ - endpoint: 'GET /internal/observability_ai_assistant/kb/entries', - params: { - query: { - query: '', - sortBy: 'title', - sortDirection: 'asc', - }, - }, - }); - - expect(res.status).to.be(200); - - expect( - res.body.entries.map(({ title, text, role, type }) => ({ title, text, role, type })) - ).to.eql([ - { - role: 'user_entry', - title: 'Toy Story quote', - type: 'contextual', - text: 'To infinity and beyond!', - }, - { - role: 'assistant_summarization', - title: "User's favourite color", - type: 'contextual', - text: "The user's favourite color is blue.", - }, - ]); - }); - }); - }); -} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_reindex_concurrency.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_reindex_concurrency.spec.ts index 9051b00261d28..fb9f5ed62cf49 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_reindex_concurrency.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_reindex_concurrency.spec.ts @@ -7,15 +7,20 @@ import expect from '@kbn/expect'; import { times } from 
'lodash'; +import { resourceNames } from '@kbn/observability-ai-assistant-plugin/server/service'; import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; import { - deleteKnowledgeBaseModel, - setupKnowledgeBase, deleteKbIndices, addSampleDocsToInternalKb, + getConcreteWriteIndexFromAlias, + reIndexKnowledgeBase, } from '../utils/knowledge_base'; import { createOrUpdateIndexAssets } from '../utils/index_assets'; import { animalSampleDocs } from '../utils/sample_docs'; +import { + deployTinyElserAndSetupKb, + teardownTinyElserModelAndInferenceEndpoint, +} from '../utils/model_and_inference'; export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); @@ -29,13 +34,13 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon before(async () => { await deleteKbIndices(es); await createOrUpdateIndexAssets(observabilityAIAssistantAPIClient); - await setupKnowledgeBase(getService); + await deployTinyElserAndSetupKb(getService); }); after(async () => { await deleteKbIndices(es); await createOrUpdateIndexAssets(observabilityAIAssistantAPIClient); - await deleteKnowledgeBaseModel(getService); + await teardownTinyElserModelAndInferenceEndpoint(getService); }); describe('when running multiple re-index operations in parallel', () => { @@ -47,7 +52,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon before(async () => { await addSampleDocsToInternalKb(getService, animalSampleDocs); - results = await Promise.all(times(20).map(() => reIndexKnowledgeBase())); + results = await Promise.all(times(20).map(() => _reIndexKnowledgeBase())); }); it('makes 20 requests to the reindex endpoint', async () => { @@ -59,12 +64,12 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon expect(successResults).to.have.length(1); }); - it('should fail every 
request but 1', async () => { + it('should fail all requests but 1', async () => { const failures = results.filter((result) => result.status !== 200); expect(failures).to.have.length(19); }); - it('throw a LockAcquisitionException for the failing requests', async () => { + it('should throw a LockAcquisitionException for the failing requests', async () => { const failures = results.filter((result) => result.status === 500); const errorMessages = failures.every( (result) => @@ -75,23 +80,29 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }); }); - describe('when running multiple re-index operations in sequence', () => { + const iterations = 5; + describe(`when running ${iterations} re-index operations in sequence`, () => { let results: Array<{ status: number; result: boolean; errorMessage: string | undefined }>; + let initialIndexSequenceNumber: number; before(async () => { + const writeIndex = await getConcreteWriteIndexFromAlias(es); + // get sequence number from write index + initialIndexSequenceNumber = parseInt(writeIndex.slice(-6), 10); + results = []; - for (const _ of times(20)) { - results.push(await reIndexKnowledgeBase()); + for (const _ of times(iterations)) { + results.push(await _reIndexKnowledgeBase()); } }); - it('makes 20 requests', async () => { - expect(results).to.have.length(20); + it(`makes ${iterations} requests`, async () => { + expect(results).to.have.length(iterations); }); it('every re-index operation succeeds', async () => { const successResults = results.filter((result) => result.status === 200); - expect(successResults).to.have.length(20); + expect(successResults).to.have.length(iterations); expect(successResults.every((r) => r.result === true)).to.be(true); }); @@ -99,13 +110,19 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon const failures = results.filter((result) => result.status !== 200); expect(failures).to.have.length(0); }); + + it('should increment the write 
index sequence number', async () => { + const writeIndex = await getConcreteWriteIndexFromAlias(es); + const sequenceNumber = (iterations + initialIndexSequenceNumber) + .toString() + .padStart(6, '0'); // e.g. 000021 + expect(writeIndex).to.be(`${resourceNames.writeIndexAlias.kb}-${sequenceNumber}`); + }); }); }); - async function reIndexKnowledgeBase() { - const res = await observabilityAIAssistantAPIClient.editor({ - endpoint: 'POST /internal/observability_ai_assistant/kb/reindex', - }); + async function _reIndexKnowledgeBase() { + const res = await reIndexKnowledgeBase(observabilityAIAssistantAPIClient); return { status: res.status, diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_reindex_to_fix_sparse_vector_support.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_reindex_to_fix_sparse_vector_support.spec.ts deleted file mode 100644 index e9afa6c29fe8f..0000000000000 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_reindex_to_fix_sparse_vector_support.spec.ts +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import expect from '@kbn/expect'; -import { resourceNames } from '@kbn/observability-ai-assistant-plugin/server/service'; -import AdmZip from 'adm-zip'; -import path from 'path'; -import { AI_ASSISTANT_SNAPSHOT_REPO_PATH } from '../../../../default_configs/stateful.config.base'; -import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; -import { - deleteKbIndices, - deleteKnowledgeBaseModel, - setupKnowledgeBase, -} from '../utils/knowledge_base'; -import { createOrUpdateIndexAssets, restoreIndexAssets } from '../utils/index_assets'; - -export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { - const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); - const es = getService('es'); - const retry = getService('retry'); - const log = getService('log'); - - describe('when the knowledge base index was created before 8.11', function () { - // Intentionally skipped in all serverless environnments (local and MKI) - // because the migration scenario being tested is not relevant to MKI and Serverless. 
- this.tags(['skipServerless']); - - before(async () => { - await unZipKbSnapshot(); - await setupKnowledgeBase(getService); - }); - - beforeEach(async () => { - await restoreKbSnapshot(); - await createOrUpdateIndexAssets(observabilityAIAssistantAPIClient); - }); - - after(async () => { - await restoreIndexAssets(observabilityAIAssistantAPIClient, es); - await deleteKnowledgeBaseModel(getService); - }); - - it('has an index created version earlier than 8.11', async () => { - await retry.try(async () => { - expect(await getKbIndexCreatedVersion()).to.be.lessThan(8110000); - }); - }); - - function createKnowledgeBaseEntry() { - const knowledgeBaseEntry = { - id: 'my-doc-id-1', - title: 'My title', - text: 'My content', - }; - - return observabilityAIAssistantAPIClient.editor({ - endpoint: 'POST /internal/observability_ai_assistant/kb/entries/save', - params: { body: knowledgeBaseEntry }, - }); - } - - it('cannot add new entries to KB', async () => { - const { status, body } = await createKnowledgeBaseEntry(); - - // @ts-expect-error - expect(body.message).to.eql( - 'The index ".kibana-observability-ai-assistant-kb" does not support semantic text and must be reindexed. This re-index operation has been scheduled and will be started automatically. Please try again later.' - ); - - expect(status).to.be(503); - }); - - it('can add new entries after re-indexing', async () => { - await reIndexKnowledgeBase(); - - await retry.try(async () => { - const { status } = await createKnowledgeBaseEntry(); - expect(status).to.be(200); - }); - }); - }); - - async function getKbIndexCreatedVersion() { - const indexSettings = await es.indices.getSettings({ - index: resourceNames.concreteIndexName.kb, - }); - - const { settings } = Object.values(indexSettings)[0]; - return parseInt(settings?.index?.version?.created ?? 
'', 10); - } - - async function unZipKbSnapshot() { - const zipFilePath = `${AI_ASSISTANT_SNAPSHOT_REPO_PATH}.zip`; - log.debug(`Unzipping ${zipFilePath} to ${AI_ASSISTANT_SNAPSHOT_REPO_PATH}`); - new AdmZip(zipFilePath).extractAllTo(path.dirname(AI_ASSISTANT_SNAPSHOT_REPO_PATH), true); - } - - async function restoreKbSnapshot() { - await deleteKbIndices(es); - - log.debug( - `Restoring snapshot of ${resourceNames.concreteIndexName.kb} from ${AI_ASSISTANT_SNAPSHOT_REPO_PATH}` - ); - const snapshotRepoName = 'snapshot-repo-8-10'; - const snapshotName = 'my_snapshot'; - await es.snapshot.createRepository({ - name: snapshotRepoName, - repository: { - type: 'fs', - settings: { location: AI_ASSISTANT_SNAPSHOT_REPO_PATH }, - }, - }); - - await es.snapshot.restore({ - repository: snapshotRepoName, - snapshot: snapshotName, - wait_for_completion: true, - body: { - indices: resourceNames.concreteIndexName.kb, - }, - }); - - await es.snapshot.deleteRepository({ name: snapshotRepoName }); - } - - async function reIndexKnowledgeBase() { - const { status } = await observabilityAIAssistantAPIClient.editor({ - endpoint: 'POST /internal/observability_ai_assistant/kb/reindex', - }); - expect(status).to.be(200); - } -} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_setup.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_setup.spec.ts index 27284746bec29..fc39b3b4d2164 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_setup.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_setup.spec.ts @@ -6,58 +6,166 @@ */ import expect from '@kbn/expect'; +import { resourceNames } from '@kbn/observability-ai-assistant-plugin/server/service'; +import { getInferenceIdFromWriteIndex } from 
'@kbn/observability-ai-assistant-plugin/server/service/knowledge_base_service/get_inference_id_from_write_index'; import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; -import { TINY_ELSER, deleteKnowledgeBaseModel, setupKnowledgeBase } from '../utils/knowledge_base'; -import { restoreIndexAssets } from '../utils/index_assets'; +import { getComponentTemplate, restoreIndexAssets } from '../utils/index_assets'; +import { + TINY_ELSER_INFERENCE_ID, + TINY_ELSER_MODEL_ID, + createTinyElserInferenceEndpoint, + deleteInferenceEndpoint, + deployTinyElserAndSetupKb, + importModel, + deleteModel, + teardownTinyElserModelAndInferenceEndpoint, +} from '../utils/model_and_inference'; +import { + getConcreteWriteIndexFromAlias, + waitForKnowledgeBaseReady, + setupKnowledgeBase, +} from '../utils/knowledge_base'; export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { const es = getService('es'); + const retry = getService('retry'); + const ml = getService('ml'); const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); describe('/internal/observability_ai_assistant/kb/setup', function () { before(async () => { - await deleteKnowledgeBaseModel(getService); + await teardownTinyElserModelAndInferenceEndpoint(getService); await restoreIndexAssets(observabilityAIAssistantAPIClient, es); }); afterEach(async () => { - await deleteKnowledgeBaseModel(getService); + await teardownTinyElserModelAndInferenceEndpoint(getService); await restoreIndexAssets(observabilityAIAssistantAPIClient, es); }); - it('returns model info when successful', async () => { - const res = await setupKnowledgeBase(getService); + it('returns 200 when model is deployed', async () => { + const { status } = await deployTinyElserAndSetupKb(getService); + expect(status).to.be(200); + }); + + it('returns 200 if model is not deployed', async () => { + const { status } = await setupKbAsAdmin(TINY_ELSER_INFERENCE_ID); + 
expect(status).to.be(200); + }); + + it('has "pt_tiny_elser_inference_id" as initial inference id', async () => { + const inferenceId = await getInferenceIdFromWriteIndex({ asInternalUser: es }); + expect(inferenceId).to.be(TINY_ELSER_INFERENCE_ID); + }); + + describe('re-indexing', () => { + describe('running setup for a different inference endpoint', () => { + const CUSTOM_TINY_ELSER_INFERENCE_ID = 'custom_tiny_elser_inference_id'; + let body: Awaited>['body']; + + before(async () => { + // setup KB initially + await deployTinyElserAndSetupKb(getService); + + // setup KB with custom inference endpoint + await createTinyElserInferenceEndpoint(getService, { + inferenceId: CUSTOM_TINY_ELSER_INFERENCE_ID, + }); + const res = await setupKbAsAdmin(CUSTOM_TINY_ELSER_INFERENCE_ID); + body = res.body; + + await waitForKnowledgeBaseReady(getService); + }); + + after(async () => { + await deleteInferenceEndpoint(getService, { + inferenceId: CUSTOM_TINY_ELSER_INFERENCE_ID, + }); + }); + + it('should re-index the KB', async () => { + expect(body.reindex).to.be(true); + expect(body.currentInferenceId).to.be(TINY_ELSER_INFERENCE_ID); + expect(body.nextInferenceId).to.be(CUSTOM_TINY_ELSER_INFERENCE_ID); + await expectWriteIndexName(`${resourceNames.writeIndexAlias.kb}-000002`); + }); + }); + + describe('running setup for the same inference id', () => { + let body: Awaited>['body']; - expect(res.body.service_settings.model_id).to.be('pt_tiny_elser'); - expect(res.body.inference_id).to.be('obs_ai_assistant_kb_inference'); + before(async () => { + await deployTinyElserAndSetupKb(getService); + const res = await setupKbAsAdmin(TINY_ELSER_INFERENCE_ID); + body = res.body; + }); + + it('does not re-index', async () => { + expect(body.reindex).to.be(false); + expect(body.currentInferenceId).to.be(TINY_ELSER_INFERENCE_ID); + expect(body.nextInferenceId).to.be(TINY_ELSER_INFERENCE_ID); + await expectWriteIndexName(`${resourceNames.writeIndexAlias.kb}-000001`); + }); + }); }); - 
it('returns error message if model is not deployed', async () => { - const res = await setupKnowledgeBase(getService, { deployModel: false }); + describe('when installing a custom inference endpoint', function () { + const customInferenceId = 'my_custom_inference_id'; - expect(res.status).to.be(500); + before(async () => { + await restoreIndexAssets(observabilityAIAssistantAPIClient, es); + await importModel(ml, { modelId: TINY_ELSER_MODEL_ID }); + await createTinyElserInferenceEndpoint(getService, { + inferenceId: customInferenceId, + }); + await setupKnowledgeBase(observabilityAIAssistantAPIClient, customInferenceId); + await waitForKnowledgeBaseReady(getService); + }); + + after(async () => { + await deleteModel(getService, { modelId: TINY_ELSER_MODEL_ID }); + await deleteInferenceEndpoint(getService, { inferenceId: customInferenceId }); + }); - // @ts-expect-error - expect(res.body.message).to.include.string( - 'No known trained model with model_id [pt_tiny_elser]' - ); + it('has correct semantic_text mapping in component template', async () => { + const res = await getComponentTemplate(es); + const semanticTextMapping = res.component_template.template.mappings?.properties + ?.semantic_text as { inference_id: string }; - // @ts-expect-error - expect(res.body.statusCode).to.be(500); + expect(semanticTextMapping.inference_id).to.be(customInferenceId); + }); }); describe('security roles and access privileges', () => { it('should deny access for users without the ai_assistant privilege', async () => { - const { status } = await observabilityAIAssistantAPIClient.viewer({ - endpoint: 'POST /internal/observability_ai_assistant/kb/setup', - params: { - query: { - model_id: TINY_ELSER.id, - }, - }, - }); + const { status } = await setupKbAsViewer(TINY_ELSER_INFERENCE_ID); expect(status).to.be(403); }); }); }); + + async function expectWriteIndexName(expectedName: string) { + await retry.try(async () => { + const writeIndex = await getConcreteWriteIndexFromAlias(es); + 
expect(writeIndex).to.be(expectedName); + }); + } + + function setupKbAsAdmin(inferenceId: string) { + return observabilityAIAssistantAPIClient.admin({ + endpoint: 'POST /internal/observability_ai_assistant/kb/setup', + params: { + query: { inference_id: inferenceId }, + }, + }); + } + + function setupKbAsViewer(inferenceId: string) { + return observabilityAIAssistantAPIClient.viewer({ + endpoint: 'POST /internal/observability_ai_assistant/kb/setup', + params: { + query: { inference_id: inferenceId }, + }, + }); + } } diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_status.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_status.spec.ts index 5572986d615e2..07edcbab12704 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_status.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_status.spec.ts @@ -6,69 +6,99 @@ */ import expect from '@kbn/expect'; +import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/common'; import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; import { - deleteKnowledgeBaseModel, - TINY_ELSER, + teardownTinyElserModelAndInferenceEndpoint, deleteInferenceEndpoint, - setupKnowledgeBase, -} from '../utils/knowledge_base'; + deployTinyElserAndSetupKb, + TINY_ELSER_MODEL_ID, + TINY_ELSER_INFERENCE_ID, + deleteModel, + stopTinyElserModel, +} from '../utils/model_and_inference'; export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { - const es = getService('es'); const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); describe('/internal/observability_ai_assistant/kb/status', function () { - beforeEach(async () => { - await 
setupKnowledgeBase(getService); - }); - - afterEach(async () => { - await deleteKnowledgeBaseModel(getService); - }); + // see details: https://github.com/elastic/kibana/issues/219217 + this.tags(['failsOnMKI']); - it('returns correct status after knowledge base is setup', async () => { + it('returns correct status before knowledge base is setup', async () => { const res = await observabilityAIAssistantAPIClient.editor({ endpoint: 'GET /internal/observability_ai_assistant/kb/status', }); expect(res.status).to.be(200); - expect(res.body.ready).to.be(true); + expect(res.body.kbState).to.be(KnowledgeBaseState.NOT_INSTALLED); expect(res.body.enabled).to.be(true); - expect(res.body.endpoint?.service_settings?.model_id).to.eql(TINY_ELSER.id); }); - it('returns correct status after model is deleted', async () => { - await deleteKnowledgeBaseModel(getService, { shouldDeleteInferenceEndpoint: false }); + describe('after the knowledge base has been set up', () => { + before(async () => { + await deployTinyElserAndSetupKb(getService); + }); - const res = await observabilityAIAssistantAPIClient.editor({ - endpoint: 'GET /internal/observability_ai_assistant/kb/status', + after(async () => { + await teardownTinyElserModelAndInferenceEndpoint(getService); }); - expect(res.status).to.be(200); + it('returns the correct status when the knowledge base is successfully installed', async () => { + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/status', + }); - expect(res.body.ready).to.be(false); - expect(res.body.enabled).to.be(true); - expect(res.body.errorMessage).to.include.string( - 'No known trained model with model_id [pt_tiny_elser]' - ); - }); + expect(res.status).to.be(200); - it('returns correct status after inference endpoint is deleted', async () => { - await deleteInferenceEndpoint({ es }); + expect(res.body.kbState).to.be(KnowledgeBaseState.READY); + expect(res.body.enabled).to.be(true); + 
expect(res.body.endpoint?.service_settings?.model_id).to.eql(TINY_ELSER_MODEL_ID); + }); - const res = await observabilityAIAssistantAPIClient.editor({ - endpoint: 'GET /internal/observability_ai_assistant/kb/status', + it('returns MODEL_PENDING_DEPLOYMENT status after the model deployment is stopped', async () => { + await stopTinyElserModel(getService); + + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/status', + }); + + expect(res.status).to.be(200); + expect(res.body.kbState).to.be(KnowledgeBaseState.MODEL_PENDING_DEPLOYMENT); }); - expect(res.status).to.be(200); + it('returns the correct status after the model is deleted', async () => { + await deleteModel(getService, { modelId: TINY_ELSER_MODEL_ID }); - expect(res.body.ready).to.be(false); - expect(res.body.enabled).to.be(true); - expect(res.body.errorMessage).to.include.string( - 'Inference endpoint not found [obs_ai_assistant_kb_inference]' - ); + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/status', + }); + + expect(res.status).to.be(200); + + expect(res.body.kbState).to.be(KnowledgeBaseState.NOT_INSTALLED); + expect(res.body.enabled).to.be(true); + expect(res.body.errorMessage).to.include.string( + 'No known trained model with model_id [pt_tiny_elser]' + ); + }); + + it('returns the correct status after inference endpoint is deleted', async () => { + await deleteInferenceEndpoint(getService, { inferenceId: TINY_ELSER_INFERENCE_ID }); + + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/status', + }); + + expect(res.status).to.be(200); + + expect(res.body.kbState).to.be(KnowledgeBaseState.NOT_INSTALLED); + expect(res.body.enabled).to.be(true); + expect(res.body.errorMessage).to.include.string( + 'Inference endpoint not found [pt_tiny_elser_inference_id]' + ); + }); }); describe('security roles 
and access privileges', () => { @@ -76,6 +106,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon const { status } = await observabilityAIAssistantAPIClient.viewer({ endpoint: 'GET /internal/observability_ai_assistant/kb/status', }); + expect(status).to.be(403); }); }); diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_user_instructions.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_user_instructions.spec.ts index a88373ebcd42a..96055d962fbc4 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_user_instructions.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_user_instructions.spec.ts @@ -12,16 +12,16 @@ import { CONTEXT_FUNCTION_NAME } from '@kbn/observability-ai-assistant-plugin/se import { Instruction } from '@kbn/observability-ai-assistant-plugin/common/types'; import pRetry from 'p-retry'; import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; -import { - clearKnowledgeBase, - deleteKnowledgeBaseModel, - setupKnowledgeBase, -} from '../utils/knowledge_base'; +import { clearKnowledgeBase } from '../utils/knowledge_base'; import { LlmProxy, createLlmProxy, } from '../../../../../../observability_ai_assistant_api_integration/common/create_llm_proxy'; import { clearConversations, getConversationCreatedEvent } from '../utils/conversation'; +import { + deployTinyElserAndSetupKb, + teardownTinyElserModelAndInferenceEndpoint, +} from '../utils/model_and_inference'; const sortById = (data: Array) => sortBy(data, 'id'); @@ -33,11 +33,11 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon describe('Knowledge base user instructions', function () { before(async () => { - await 
setupKnowledgeBase(getService); + await deployTinyElserAndSetupKb(getService); }); after(async () => { - await deleteKnowledgeBaseModel(getService); + await teardownTinyElserModelAndInferenceEndpoint(getService); await clearKnowledgeBase(es); await clearConversations(es); }); @@ -274,7 +274,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon expect(status).to.be(200); void proxy.interceptTitle('This is a conversation title'); - void proxy.interceptConversation('I, the LLM, hear you!'); + void proxy.interceptWithResponse('I, the LLM, hear you!'); const messages: Message[] = [ { @@ -440,7 +440,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }); it('includes private KB instructions in the system message sent to the LLM', async () => { - const simulatorPromise = proxy.interceptConversation('Hello from LLM Proxy'); + const simulatorPromise = proxy.interceptWithResponse('Hello from LLM Proxy'); const messages: Message[] = [ { '@timestamp': new Date().toISOString(), diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_warmup_model.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_warmup_model.spec.ts new file mode 100644 index 0000000000000..07df98671cfab --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_warmup_model.spec.ts @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import expect from '@kbn/expect'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; +import { + TINY_ELSER_INFERENCE_ID, + teardownTinyElserModelAndInferenceEndpoint, + setupTinyElserModelAndInferenceEndpoint, +} from '../utils/model_and_inference'; + +export default function WarmupModelApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + + function warmupKbAsAdmin(inferenceId: string) { + return observabilityAIAssistantAPIClient.admin({ + endpoint: 'POST /internal/observability_ai_assistant/kb/warmup_model', + params: { + query: { + inference_id: inferenceId, + }, + }, + }); + } + + function warmupKbAsViewer(inferenceId: string) { + return observabilityAIAssistantAPIClient.viewer({ + endpoint: 'POST /internal/observability_ai_assistant/kb/warmup_model', + params: { + query: { + inference_id: inferenceId, + }, + }, + }); + } + + describe('/internal/observability_ai_assistant/kb/warmup_model', function () { + const inferenceId = TINY_ELSER_INFERENCE_ID; + + before(async () => { + await setupTinyElserModelAndInferenceEndpoint(getService); + }); + + after(async () => { + await teardownTinyElserModelAndInferenceEndpoint(getService); + }); + + it('returns 200 and triggers model warmup', async () => { + const response = await warmupKbAsAdmin(inferenceId); + expect(response.status).to.be(200); + }); + + it('should deny access for users without the ai_assistant privilege', async () => { + const response = await warmupKbAsViewer(inferenceId); + expect(response.status).to.be(403); + }); + }); +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshot_kb_8.10.zip b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshot_kb_8.10.zip deleted file mode 100644 index 0e65dd1848246..0000000000000 Binary files 
a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshot_kb_8.10.zip and /dev/null differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/index-0 b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/index-0 new file mode 100644 index 0000000000000..3d04ed954acc4 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/index-0 @@ -0,0 +1,33 @@ +{ + "min_version": "7.12.0", + "uuid": "Z32sceNySTyUXskXH4H93g", + "cluster_id": "pN2COvnNRlioIBagZPodZA", + "snapshots": [ + { + "name": "my_snapshot", + "uuid": "9Wej6Q4gRZWSblPzNrtLOQ", + "state": 1, + "index_metadata_lookup": { + "oYscBShISaWpDm7n1KXkCA": "X0NlkruTSgCxTsBO_mA37g-_na_-3-1-1" + }, + "version": 8100599, + "start_time_millis": 1739275733654, + "end_time_millis": 1739275734671, + "slm_policy": "" + } + ], + "indices": { + ".kibana-observability-ai-assistant-kb-000001": { + "id": "oYscBShISaWpDm7n1KXkCA", + "snapshots": [ + "9Wej6Q4gRZWSblPzNrtLOQ" + ], + "shard_generations": [ + "KxGoXW9rQlmlCfPaW8lMYA" + ] + } + }, + "index_metadata_identifiers": { + "X0NlkruTSgCxTsBO_mA37g-_na_-3-1-1": "3cLp9JQBDFLNeO2pjufJ" + } +} \ No newline at end of file diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/index.latest b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/index.latest new file mode 100644 index 0000000000000..1b1cb4d44c57c Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/index.latest differ diff --git 
a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/indices/oYscBShISaWpDm7n1KXkCA/0/index-KxGoXW9rQlmlCfPaW8lMYA b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/indices/oYscBShISaWpDm7n1KXkCA/0/index-KxGoXW9rQlmlCfPaW8lMYA new file mode 100644 index 0000000000000..a84a4003c8581 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/indices/oYscBShISaWpDm7n1KXkCA/0/index-KxGoXW9rQlmlCfPaW8lMYA differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/indices/oYscBShISaWpDm7n1KXkCA/0/snap-9Wej6Q4gRZWSblPzNrtLOQ.dat b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/indices/oYscBShISaWpDm7n1KXkCA/0/snap-9Wej6Q4gRZWSblPzNrtLOQ.dat new file mode 100644 index 0000000000000..f0d10207322e7 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/indices/oYscBShISaWpDm7n1KXkCA/0/snap-9Wej6Q4gRZWSblPzNrtLOQ.dat differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/indices/oYscBShISaWpDm7n1KXkCA/meta-3cLp9JQBDFLNeO2pjufJ.dat b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/indices/oYscBShISaWpDm7n1KXkCA/meta-3cLp9JQBDFLNeO2pjufJ.dat new file mode 100644 index 0000000000000..32b412287c81a Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/indices/oYscBShISaWpDm7n1KXkCA/meta-3cLp9JQBDFLNeO2pjufJ.dat differ diff --git 
a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/meta-9Wej6Q4gRZWSblPzNrtLOQ.dat b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/meta-9Wej6Q4gRZWSblPzNrtLOQ.dat new file mode 100644 index 0000000000000..b5c6fc7479ceb Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/meta-9Wej6Q4gRZWSblPzNrtLOQ.dat differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/snap-9Wej6Q4gRZWSblPzNrtLOQ.dat b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/snap-9Wej6Q4gRZWSblPzNrtLOQ.dat new file mode 100644 index 0000000000000..b28bf7e224a4c Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.10/snap-9Wej6Q4gRZWSblPzNrtLOQ.dat differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/index-2 b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/index-2 new file mode 100644 index 0000000000000..36a99d44334c9 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/index-2 @@ -0,0 +1 @@ 
+{"min_version":"7.12.0","uuid":"bPmLsBiwRb2PstKIPFVa9g","cluster_id":"RiFYfwi9SL-ADRkzOQBk6w","snapshots":[{"name":"kb_snapshot_8.16","uuid":"y1zcTBUTQFuKFwMMbi7VvQ","state":1,"index_metadata_lookup":{"2MSR7bXrSyyoo2ZmErlBkg":"AwNXKEXNQO6os9NaVvvuQg-_na_-2-2-1"},"version":"8.11.0","index_version":8518000,"start_time_millis":1746016036173,"end_time_millis":1746016036580,"slm_policy":""}],"indices":{".kibana-observability-ai-assistant-kb-000001":{"id":"2MSR7bXrSyyoo2ZmErlBkg","snapshots":["y1zcTBUTQFuKFwMMbi7VvQ"],"shard_generations":["FWDT04_LTIaFE_BxwPeJfA"]}},"index_metadata_identifiers":{"AwNXKEXNQO6os9NaVvvuQg-_na_-2-2-1":"ZOmqhpYBNSJu_urmZ8Mp"}} \ No newline at end of file diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/index.latest b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/index.latest new file mode 100644 index 0000000000000..ccfcbf4136d77 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/index.latest differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__06CPqevQQOmvowG1MpCD-g b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__06CPqevQQOmvowG1MpCD-g new file mode 100644 index 0000000000000..7565784e442cf Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__06CPqevQQOmvowG1MpCD-g differ diff --git 
a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__4vijD3jIRDKIsqLVeCyiRg b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__4vijD3jIRDKIsqLVeCyiRg new file mode 100644 index 0000000000000..03980a7b710c4 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__4vijD3jIRDKIsqLVeCyiRg differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__67_8A2HNTvS9VX_pNYgoGw b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__67_8A2HNTvS9VX_pNYgoGw new file mode 100644 index 0000000000000..7ecdcb8460701 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__67_8A2HNTvS9VX_pNYgoGw differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__BLiiHnFWQhW5EoLYh86Csg b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__BLiiHnFWQhW5EoLYh86Csg new file mode 100644 index 0000000000000..bdb52cd4e1a42 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__BLiiHnFWQhW5EoLYh86Csg differ diff --git 
a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__BWt56U-gTom2lEcvFItpdg b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__BWt56U-gTom2lEcvFItpdg new file mode 100644 index 0000000000000..bb3afe1549f74 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__BWt56U-gTom2lEcvFItpdg differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__D420iyS_RrOpKLE5V4CKYA b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__D420iyS_RrOpKLE5V4CKYA new file mode 100644 index 0000000000000..6a2711986206b Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__D420iyS_RrOpKLE5V4CKYA differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__D_1rlwiZSf26cF3O5bxszQ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__D_1rlwiZSf26cF3O5bxszQ new file mode 100644 index 0000000000000..bc87d6192dec2 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__D_1rlwiZSf26cF3O5bxszQ differ diff --git 
a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__FxtJJJycSD2Seyxdr6QVGw b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__FxtJJJycSD2Seyxdr6QVGw new file mode 100644 index 0000000000000..303926ad119ce Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__FxtJJJycSD2Seyxdr6QVGw differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__GwPjXn70RheLT-KpbFv81w b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__GwPjXn70RheLT-KpbFv81w new file mode 100644 index 0000000000000..95a76d6320e77 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__GwPjXn70RheLT-KpbFv81w differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__H34FA1J7SoWVKoGOWJlSiw b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__H34FA1J7SoWVKoGOWJlSiw new file mode 100644 index 0000000000000..cf79d85cadd65 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__H34FA1J7SoWVKoGOWJlSiw differ diff --git 
a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__LAUFvO8FTlaqxCpF7pLXRw b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__LAUFvO8FTlaqxCpF7pLXRw new file mode 100644 index 0000000000000..b8dc954a0cbde Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__LAUFvO8FTlaqxCpF7pLXRw differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__Lw5ZKbaITR64_ormwZVmog b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__Lw5ZKbaITR64_ormwZVmog new file mode 100644 index 0000000000000..6605ce134a018 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__Lw5ZKbaITR64_ormwZVmog differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__MiOlmmuaQLus8qTMapK-HA b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__MiOlmmuaQLus8qTMapK-HA new file mode 100644 index 0000000000000..03d81bf58f0b7 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__MiOlmmuaQLus8qTMapK-HA differ diff --git 
a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__PCbIJy9NR66eWvkmj0W2yA b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__PCbIJy9NR66eWvkmj0W2yA new file mode 100644 index 0000000000000..0c4ed44c47157 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__PCbIJy9NR66eWvkmj0W2yA differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__PSVIIb94R6WGZknZiAJvGg b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__PSVIIb94R6WGZknZiAJvGg new file mode 100644 index 0000000000000..54dbbee94b8d9 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__PSVIIb94R6WGZknZiAJvGg differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__REtuRhUpTfSBtRs_dN0fZg b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__REtuRhUpTfSBtRs_dN0fZg new file mode 100644 index 0000000000000..15f634d1f8d88 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__REtuRhUpTfSBtRs_dN0fZg differ diff --git 
a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__Rl_c6BJPTGW7E5uuo5eDIw b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__Rl_c6BJPTGW7E5uuo5eDIw new file mode 100644 index 0000000000000..745390590f346 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__Rl_c6BJPTGW7E5uuo5eDIw differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__SPQW2ou3T02QJxsmOrrvMg b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__SPQW2ou3T02QJxsmOrrvMg new file mode 100644 index 0000000000000..c238655c7f392 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__SPQW2ou3T02QJxsmOrrvMg differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__SfcedHiHRXaOxmmxYtbFlw b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__SfcedHiHRXaOxmmxYtbFlw new file mode 100644 index 0000000000000..13ec1b81210a7 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__SfcedHiHRXaOxmmxYtbFlw differ diff --git 
a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__TpwosEuPRm-uCXdsyY4hFw b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__TpwosEuPRm-uCXdsyY4hFw new file mode 100644 index 0000000000000..29ca447d5dd01 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__TpwosEuPRm-uCXdsyY4hFw differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__VTFzLU52RuiM9HxDBWOPhA b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__VTFzLU52RuiM9HxDBWOPhA new file mode 100644 index 0000000000000..19ea58d2da9e0 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__VTFzLU52RuiM9HxDBWOPhA differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__YBl88oxKQiSI1qHr2GLwxQ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__YBl88oxKQiSI1qHr2GLwxQ new file mode 100644 index 0000000000000..67276f69a45af Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__YBl88oxKQiSI1qHr2GLwxQ differ diff --git 
a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/___mIOvmULQBWV2zkBoDVveA b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/___mIOvmULQBWV2zkBoDVveA new file mode 100644 index 0000000000000..b39169f45fe76 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/___mIOvmULQBWV2zkBoDVveA differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__aG94RvZxT8Ksa-xz2fgEvg b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__aG94RvZxT8Ksa-xz2fgEvg new file mode 100644 index 0000000000000..cb23bd92639b9 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__aG94RvZxT8Ksa-xz2fgEvg differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__aRTsRTBPQpSlnI2YORTeYA b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__aRTsRTBPQpSlnI2YORTeYA new file mode 100644 index 0000000000000..2ef933ee43cab Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__aRTsRTBPQpSlnI2YORTeYA differ diff --git 
a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__kRTD6xS5Qxin-dxmKsTBMw b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__kRTD6xS5Qxin-dxmKsTBMw new file mode 100644 index 0000000000000..6f5a4932785ed Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__kRTD6xS5Qxin-dxmKsTBMw differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__l7M6Slx7T5CDnEKFqR8-gg b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__l7M6Slx7T5CDnEKFqR8-gg new file mode 100644 index 0000000000000..87effc6ac7d91 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__l7M6Slx7T5CDnEKFqR8-gg differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__m0R_-B8CSsG44znmiNNN2Q b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__m0R_-B8CSsG44znmiNNN2Q new file mode 100644 index 0000000000000..89465151b6824 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__m0R_-B8CSsG44znmiNNN2Q differ diff --git 
a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__oodLbOo3TDKZc57aEYPLCw b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__oodLbOo3TDKZc57aEYPLCw new file mode 100644 index 0000000000000..e5a30ed10c4e4 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__oodLbOo3TDKZc57aEYPLCw differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__rgBY_HmwRe2Yl7mgBTfiEw b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__rgBY_HmwRe2Yl7mgBTfiEw new file mode 100644 index 0000000000000..522596591a0de Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__rgBY_HmwRe2Yl7mgBTfiEw differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__tNe8csu0QoO6dTqllqk_dA b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__tNe8csu0QoO6dTqllqk_dA new file mode 100644 index 0000000000000..4ed7b362f6d08 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__tNe8csu0QoO6dTqllqk_dA differ diff --git 
a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__ucs8kEFbS3SLJlPbej4NLw b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__ucs8kEFbS3SLJlPbej4NLw new file mode 100644 index 0000000000000..adc95a94f16a8 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/__ucs8kEFbS3SLJlPbej4NLw differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/index-FWDT04_LTIaFE_BxwPeJfA b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/index-FWDT04_LTIaFE_BxwPeJfA new file mode 100644 index 0000000000000..4da0383b1f28e Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/index-FWDT04_LTIaFE_BxwPeJfA differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/snap-y1zcTBUTQFuKFwMMbi7VvQ.dat b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/snap-y1zcTBUTQFuKFwMMbi7VvQ.dat new file mode 100644 index 0000000000000..b3a2012402dbc Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/0/snap-y1zcTBUTQFuKFwMMbi7VvQ.dat differ diff --git 
a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/meta-ZOmqhpYBNSJu_urmZ8Mp.dat b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/meta-ZOmqhpYBNSJu_urmZ8Mp.dat new file mode 100644 index 0000000000000..d4be42f7b6e3a Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/indices/2MSR7bXrSyyoo2ZmErlBkg/meta-ZOmqhpYBNSJu_urmZ8Mp.dat differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/meta-y1zcTBUTQFuKFwMMbi7VvQ.dat b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/meta-y1zcTBUTQFuKFwMMbi7VvQ.dat new file mode 100644 index 0000000000000..afdc1c0374b99 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/meta-y1zcTBUTQFuKFwMMbi7VvQ.dat differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/snap-y1zcTBUTQFuKFwMMbi7VvQ.dat b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/snap-y1zcTBUTQFuKFwMMbi7VvQ.dat new file mode 100644 index 0000000000000..95090403c521c Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.16/snap-y1zcTBUTQFuKFwMMbi7VvQ.dat differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/index-0 
b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/index-0 new file mode 100644 index 0000000000000..8820c0f1fa503 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/index-0 @@ -0,0 +1 @@ +{"min_version":"7.12.0","uuid":"S5_NnfquTvy7T3xAMy6NlQ","cluster_id":"DPaomvnLQ1iRiAqo4uznXw","snapshots":[{"name":"kb_snapshot_8.18","uuid":"oHvKVUVfQ4W501XyBty3_Q","state":1,"index_metadata_lookup":{"xqR66eHHSniUBARvrCJl_w":"p9UA-4h1S2KqFVVYRvASWQ-_na_-2-2-1"},"version":"8.11.0","index_version":8525000,"start_time_millis":1745926677713,"end_time_millis":1745926677918,"slm_policy":""}],"indices":{".kibana-observability-ai-assistant-kb-000001":{"id":"xqR66eHHSniUBARvrCJl_w","snapshots":["oHvKVUVfQ4W501XyBty3_Q"],"shard_generations":["E0OyraKORESn_utIfSkkyg"]}},"index_metadata_identifiers":{"p9UA-4h1S2KqFVVYRvASWQ-_na_-2-2-1":"qUpWgZYBfn3I2UYp5TrQ"}} \ No newline at end of file diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/index.latest b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/index.latest new file mode 100644 index 0000000000000..1b1cb4d44c57c Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/index.latest differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/indices/xqR66eHHSniUBARvrCJl_w/0/__fd1J8ir-QFu4E0UfWCe62A b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/indices/xqR66eHHSniUBARvrCJl_w/0/__fd1J8ir-QFu4E0UfWCe62A new file mode 100644 index 0000000000000..eb6e5a886b94d Binary files 
/dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/indices/xqR66eHHSniUBARvrCJl_w/0/__fd1J8ir-QFu4E0UfWCe62A differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/indices/xqR66eHHSniUBARvrCJl_w/0/__oE6vUpmtRsOKre8fj4wmPQ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/indices/xqR66eHHSniUBARvrCJl_w/0/__oE6vUpmtRsOKre8fj4wmPQ new file mode 100644 index 0000000000000..0a5f19b148f11 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/indices/xqR66eHHSniUBARvrCJl_w/0/__oE6vUpmtRsOKre8fj4wmPQ differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/indices/xqR66eHHSniUBARvrCJl_w/0/index-E0OyraKORESn_utIfSkkyg b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/indices/xqR66eHHSniUBARvrCJl_w/0/index-E0OyraKORESn_utIfSkkyg new file mode 100644 index 0000000000000..8dddc2a52b666 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/indices/xqR66eHHSniUBARvrCJl_w/0/index-E0OyraKORESn_utIfSkkyg differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/indices/xqR66eHHSniUBARvrCJl_w/0/snap-oHvKVUVfQ4W501XyBty3_Q.dat b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/indices/xqR66eHHSniUBARvrCJl_w/0/snap-oHvKVUVfQ4W501XyBty3_Q.dat new file mode 100644 index 0000000000000..1844f1b169955 Binary files /dev/null and 
b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/indices/xqR66eHHSniUBARvrCJl_w/0/snap-oHvKVUVfQ4W501XyBty3_Q.dat differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/indices/xqR66eHHSniUBARvrCJl_w/meta-qUpWgZYBfn3I2UYp5TrQ.dat b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/indices/xqR66eHHSniUBARvrCJl_w/meta-qUpWgZYBfn3I2UYp5TrQ.dat new file mode 100644 index 0000000000000..131c2a6f3ea4c Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/indices/xqR66eHHSniUBARvrCJl_w/meta-qUpWgZYBfn3I2UYp5TrQ.dat differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/meta-oHvKVUVfQ4W501XyBty3_Q.dat b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/meta-oHvKVUVfQ4W501XyBty3_Q.dat new file mode 100644 index 0000000000000..afdc1c0374b99 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/meta-oHvKVUVfQ4W501XyBty3_Q.dat differ diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/snap-oHvKVUVfQ4W501XyBty3_Q.dat b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/snap-oHvKVUVfQ4W501XyBty3_Q.dat new file mode 100644 index 0000000000000..911009d0ce830 Binary files /dev/null and b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/snapshot_kb_8.18/snap-oHvKVUVfQ4W501XyBty3_Q.dat differ diff 
--git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/public_complete/public_complete.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/public_complete/public_complete.spec.ts index 107e761c37814..63fb7f4691c77 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/public_complete/public_complete.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/public_complete/public_complete.spec.ts @@ -15,14 +15,12 @@ import { type StreamingChatResponseEvent, } from '@kbn/observability-ai-assistant-plugin/common/conversation_complete'; import { type Instruction } from '@kbn/observability-ai-assistant-plugin/common/types'; -import type { ChatCompletionChunkToolCall } from '@kbn/inference-common'; -import { ChatCompletionStreamParams } from 'openai/lib/ChatCompletionStream'; import { createLlmProxy, LlmProxy, - ToolMessage, } from '../../../../../../observability_ai_assistant_api_integration/common/create_llm_proxy'; import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; +import { decodeEvents } from '../utils/conversation'; export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { const log = getService('log'); @@ -41,23 +39,20 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon describe('/api/observability_ai_assistant/chat/complete', function () { // Fails on MKI: https://github.com/elastic/kibana/issues/205581 this.tags(['failsOnMKI']); - let proxy: LlmProxy; + let llmProxy: LlmProxy; let connectorId: string; - async function addInterceptorsAndCallComplete({ + async function callPublicChatComplete({ actions, instructions, format = 'default', - conversationResponse, + persist = true, }: { actions?: Array>; instructions?: Array; format?: 'openai' | 'default'; - conversationResponse: string | ToolMessage; + persist?: boolean; 
}) { - const titleSimulatorPromise = proxy.interceptTitle('My Title'); - const conversationSimulatorPromise = proxy.interceptConversation(conversationResponse); - const response = await observabilityAIAssistantAPIClient.admin({ endpoint: 'POST /api/observability_ai_assistant/chat/complete 2023-10-31', params: { @@ -65,38 +60,20 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon body: { messages, connectorId, - persist: true, + persist, actions, instructions, }, }, }); - await proxy.waitForAllInterceptorsToHaveBeenCalled(); - - const titleSimulator = await titleSimulatorPromise; - const conversationSimulator = await conversationSimulatorPromise; - - return { - titleSimulator, - conversationSimulator, - responseBody: String(response.body), - }; - } - - function getEventsFromBody(body: string) { - return body - .split('\n') - .map((line) => line.trim()) - .filter(Boolean) - .map((line) => JSON.parse(line) as StreamingChatResponseEvent) - .slice(2); // ignore context request/response, we're testing this elsewhere + return String(response.body); } before(async () => { - proxy = await createLlmProxy(log); + llmProxy = await createLlmProxy(log); connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({ - port: proxy.getPort(), + port: llmProxy.getPort(), }); }); @@ -104,7 +81,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon await observabilityAIAssistantAPIClient.deleteActionConnector({ actionId: connectorId, }); - proxy.close(); + llmProxy.close(); }); const action = { @@ -120,27 +97,27 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }, } as const; - const toolCallMock: ChatCompletionChunkToolCall = { - toolCallId: 'fake-index', - index: 0, - function: { - name: 'my_action', - arguments: JSON.stringify({ foo: 'bar' }), - }, - }; + afterEach(async () => { + llmProxy.clear(); + }); describe('after executing an action and closing the stream', () 
=> { let events: StreamingChatResponseEvent[]; before(async () => { - const { responseBody } = await addInterceptorsAndCallComplete({ + void llmProxy.interceptTitle('My Title'); + void llmProxy.interceptWithFunctionRequest({ + name: 'my_action', + arguments: () => JSON.stringify({ foo: 'bar' }), + }); + + const responseBody = await callPublicChatComplete({ actions: [action], - conversationResponse: { - tool_calls: [toolCallMock], - }, }); - events = getEventsFromBody(responseBody); + await llmProxy.waitForAllInterceptorsToHaveBeenCalled(); + + events = decodeEvents(responseBody); }); it('does not persist the conversation (the last event is not a conversationUpdated event)', () => { @@ -149,29 +126,31 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon expect(lastEvent.type).to.be('messageAdd'); expect(lastEvent.message.message.function_call).to.eql({ name: 'my_action', - arguments: toolCallMock.function.arguments, + arguments: JSON.stringify({ foo: 'bar' }), trigger: MessageRole.Assistant, }); }); }); describe('after adding an instruction', () => { - let body: ChatCompletionStreamParams; - before(async () => { - const { conversationSimulator } = await addInterceptorsAndCallComplete({ + void llmProxy.interceptWithFunctionRequest({ + name: 'my_action', + arguments: () => JSON.stringify({ foo: 'bar' }), + }); + + await callPublicChatComplete({ instructions: ['This is a random instruction'], actions: [action], - conversationResponse: { - tool_calls: [toolCallMock], - }, + persist: false, }); - body = conversationSimulator.requestBody; + await llmProxy.waitForAllInterceptorsToHaveBeenCalled(); }); it('includes the instruction in the system message', async () => { - expect(body.messages[0].content).to.contain('This is a random instruction'); + const { requestBody } = llmProxy.interceptedRequests[0]; + expect(requestBody.messages[0].content).to.contain('This is a random instruction'); }); }); @@ -179,10 +158,12 @@ export default function 
ApiTest({ getService }: DeploymentAgnosticFtrProviderCon let responseBody: string; before(async () => { - ({ responseBody } = await addInterceptorsAndCallComplete({ - format: 'openai', - conversationResponse: 'Hello', - })); + void llmProxy.interceptTitle('My Title'); + void llmProxy.interceptWithResponse('Hello'); + + responseBody = await callPublicChatComplete({ format: 'openai' }); + + await llmProxy.waitForAllInterceptorsToHaveBeenCalled(); }); function extractDataParts(lines: string[]) { @@ -194,12 +175,12 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }); } - function getLines() { - return responseBody.split('\n\n').filter(Boolean); + function getLines(str: string) { + return str.split('\n\n').filter(Boolean); } it('outputs each line an SSE-compatible format (data: ...)', () => { - const lines = getLines(); + const lines = getLines(responseBody); lines.forEach((line) => { expect(line.match(/^data: /)); @@ -207,14 +188,14 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon }); it('ouputs one chunk, and one [DONE] event', () => { - const dataParts = extractDataParts(getLines()); + const dataParts = extractDataParts(getLines(responseBody)); expect(dataParts[0]).not.to.be.empty(); expect(dataParts[1]).to.be('[DONE]'); }); it('outuputs an OpenAI-compatible chunk', () => { - const [dataLine] = extractDataParts(getLines()); + const [dataLine] = extractDataParts(getLines(responseBody)); expect(() => { JSON.parse(dataLine); diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/index_assets.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/index_assets.ts index 5facf5a62f325..fed69ef98ab3a 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/index_assets.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/index_assets.ts @@ -7,44 
+7,62 @@ import expect from '@kbn/expect'; import { Client } from '@elastic/elasticsearch'; -import { resourceNames } from '@kbn/observability-ai-assistant-plugin/server/service'; +import { + getResourceName, + resourceNames, +} from '@kbn/observability-ai-assistant-plugin/server/service'; import type { ObservabilityAIAssistantApiClient } from '../../../../services/observability_ai_assistant_api'; +import { TINY_ELSER_INFERENCE_ID } from './model_and_inference'; + +export async function runStartupMigrations( + observabilityAIAssistantAPIClient: ObservabilityAIAssistantApiClient +) { + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'POST /internal/observability_ai_assistant/kb/migrations/startup', + }); + expect(status).to.be(200); +} export async function createOrUpdateIndexAssets( observabilityAIAssistantAPIClient: ObservabilityAIAssistantApiClient ) { const { status } = await observabilityAIAssistantAPIClient.editor({ endpoint: 'POST /internal/observability_ai_assistant/index_assets', + params: { + query: { + inference_id: TINY_ELSER_INFERENCE_ID, + }, + }, }); expect(status).to.be(200); } -async function deleteWriteIndices(es: Client) { - const response = await es.indices.get({ index: Object.values(resourceNames.indexPatterns) }); +export async function deleteIndexAssets(es: Client) { + // delete write indices + const response = await es.indices.get({ index: getResourceName('*') }); const indicesToDelete = Object.keys(response); if (indicesToDelete.length > 0) { - await es.indices.delete({ index: indicesToDelete, ignore_unavailable: true }); + await es.indices.delete({ index: indicesToDelete, ignore_unavailable: true }).catch((err) => { + // ignore `IndexNotFoundException` error thrown by ES serverless: https://github.com/elastic/elasticsearch/blob/f1f745966f9c6b9d9fcad5242efb9a494d11e526/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java#L2120-L2124 + }); } + + await es.indices.deleteIndexTemplate({ name: 
getResourceName('*') }, { ignore: [404] }); + await es.cluster.deleteComponentTemplate({ name: getResourceName('*') }, { ignore: [404] }); } export async function restoreIndexAssets( observabilityAIAssistantAPIClient: ObservabilityAIAssistantApiClient, es: Client ) { - await deleteWriteIndices(es); - - // delete index templates - await es.indices.deleteIndexTemplate( - { name: Object.values(resourceNames.indexTemplate) }, - { ignore: [404] } - ); + await deleteIndexAssets(es); + await createOrUpdateIndexAssets(observabilityAIAssistantAPIClient); +} - // delete component templates - await es.cluster.deleteComponentTemplate( - { name: Object.values(resourceNames.componentTemplate) }, - { ignore: [404] } - ); +export async function getComponentTemplate(es: Client) { + const res = await es.cluster.getComponentTemplate({ + name: resourceNames.componentTemplate.kb, + }); - // create index assets from scratch - await createOrUpdateIndexAssets(observabilityAIAssistantAPIClient); + return res.component_templates[0]; } diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/logger.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/kibana_mocks.ts similarity index 51% rename from x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/logger.ts rename to x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/kibana_mocks.ts index 7248d279bd492..0968b5429fed6 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/logger.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/kibana_mocks.ts @@ -5,7 +5,11 @@ * 2.0. 
*/ +import { Client } from '@elastic/elasticsearch'; +import { CoreSetup } from '@kbn/core/server'; import { Logger } from '@kbn/logging'; +import { ObservabilityAIAssistantConfig } from '@kbn/observability-ai-assistant-plugin/server/config'; +import { ObservabilityAIAssistantPluginStartDependencies } from '@kbn/observability-ai-assistant-plugin/server/types'; import { ToolingLog } from '@kbn/tooling-log'; export function getLoggerMock(toolingLog: ToolingLog) { @@ -16,5 +20,19 @@ export function getLoggerMock(toolingLog: ToolingLog) { warn: (...args: any[]) => toolingLog.warning(...args), fatal: (...args: any[]) => toolingLog.warning(...args), trace: (...args: any[]) => toolingLog.debug(...args), + get: () => getLoggerMock(toolingLog), } as unknown as Logger; } + +export function getCoreMock(es: Client) { + return { + getStartServices: async () => [{ elasticsearch: { client: { asInternalUser: es } } }], + } as unknown as CoreSetup; +} + +export function getConfigMock(config: Partial) { + return { + enableKnowledgeBase: true, + ...config, + } as ObservabilityAIAssistantConfig; +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/knowledge_base.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/knowledge_base.ts index 013e13b277dee..0b6667b27e932 100644 --- a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/knowledge_base.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/knowledge_base.ts @@ -5,143 +5,70 @@ * 2.0. 
*/ -import expect from '@kbn/expect'; import { Client } from '@elastic/elasticsearch'; -import { AI_ASSISTANT_KB_INFERENCE_ID } from '@kbn/observability-ai-assistant-plugin/server/service/inference_endpoint'; -import { ToolingLog } from '@kbn/tooling-log'; -import { RetryService } from '@kbn/ftr-common-functional-services'; -import { Instruction } from '@kbn/observability-ai-assistant-plugin/common/types'; +import { + Instruction, + KnowledgeBaseEntry, + KnowledgeBaseState, +} from '@kbn/observability-ai-assistant-plugin/common/types'; import { resourceNames } from '@kbn/observability-ai-assistant-plugin/server/service'; +import expect from '@kbn/expect'; import { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; -import type { ObservabilityAIAssistantApiClient } from '../../../../services/observability_ai_assistant_api'; -import { MachineLearningProvider } from '../../../../../services/ml'; -import { SUPPORTED_TRAINED_MODELS } from '../../../../../../functional/services/ml/api'; import { setAdvancedSettings } from './advanced_settings'; +import { TINY_ELSER_INFERENCE_ID } from './model_and_inference'; +import type { ObservabilityAIAssistantApiClient } from '../../../../services/observability_ai_assistant_api'; -export const TINY_ELSER = { - ...SUPPORTED_TRAINED_MODELS.TINY_ELSER, - id: SUPPORTED_TRAINED_MODELS.TINY_ELSER.name, -}; - -export async function importTinyElserModel(ml: ReturnType) { - const config = { - ...ml.api.getTrainedModelConfig(TINY_ELSER.name), - input: { - field_names: ['text_field'], - }, - }; - // necessary for MKI, check indices before importing model. 
compatible with stateful - await ml.api.assureMlStatsIndexExists(); - await ml.api.importTrainedModel(TINY_ELSER.name, TINY_ELSER.id, config); +export async function clearKnowledgeBase(es: Client) { + return es.deleteByQuery({ + index: resourceNames.indexPatterns.kb, + conflicts: 'proceed', + query: { match_all: {} }, + refresh: true, + }); } -export async function setupKnowledgeBase( +export async function waitForKnowledgeBaseIndex( getService: DeploymentAgnosticFtrProviderContext['getService'], - { - deployModel: deployModel = true, - }: { - deployModel?: boolean; - } = {} + expectedIndex: string ) { - const log = getService('log'); - const ml = getService('ml'); const retry = getService('retry'); - const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); - - if (deployModel) { - await importTinyElserModel(ml); - } + const es = getService('es'); - const { status, body } = await observabilityAIAssistantAPIClient.admin({ - endpoint: 'POST /internal/observability_ai_assistant/kb/setup', - params: { - query: { - model_id: TINY_ELSER.id, - }, - }, + await retry.try(async () => { + const currentIndex = await getConcreteWriteIndexFromAlias(es); + expect(currentIndex).to.be(expectedIndex); }); - - if (deployModel) { - await waitForKnowledgeBaseReady({ observabilityAIAssistantAPIClient, log, retry }); - } - - return { status, body }; } -export async function waitForKnowledgeBaseReady({ - observabilityAIAssistantAPIClient, - log, - retry, -}: { - observabilityAIAssistantAPIClient: ObservabilityAIAssistantApiClient; - log: ToolingLog; - retry: RetryService; -}) { +export async function waitForKnowledgeBaseReady( + getService: DeploymentAgnosticFtrProviderContext['getService'] +) { + const retry = getService('retry'); + const log = getService('log'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + await retry.tryForTime(5 * 60 * 1000, async () => { log.debug(`Waiting for knowledge base to be ready...`); 
const res = await observabilityAIAssistantAPIClient.editor({ endpoint: 'GET /internal/observability_ai_assistant/kb/status', }); expect(res.status).to.be(200); - expect(res.body.ready).to.be(true); + expect(res.body.kbState).to.be(KnowledgeBaseState.READY); + expect(res.body.isReIndexing).to.be(false); + log.debug(`Knowledge base is in ready state.`); }); } -export async function deleteKnowledgeBaseModel( - getService: DeploymentAgnosticFtrProviderContext['getService'], - { - shouldDeleteInferenceEndpoint = true, - }: { - shouldDeleteInferenceEndpoint?: boolean; - } = {} +export async function setupKnowledgeBase( + observabilityAIAssistantAPIClient: ObservabilityAIAssistantApiClient, + inferenceId: string ) { - const log = getService('log'); - const ml = getService('ml'); - const es = getService('es'); - - try { - await ml.api.stopTrainedModelDeploymentES(TINY_ELSER.id, true); - await ml.api.deleteTrainedModelES(TINY_ELSER.id); - await ml.testResources.cleanMLSavedObjects(); - - if (shouldDeleteInferenceEndpoint) { - await deleteInferenceEndpoint({ es }); - } - } catch (e) { - if (e.message.includes('resource_not_found_exception')) { - log.debug(`Knowledge base model was already deleted.`); - return; - } - - log.error(`Could not delete knowledge base model: ${e}`); - } -} - -export async function clearKnowledgeBase(es: Client) { - return es.deleteByQuery({ - index: resourceNames.indexPatterns.kb, - conflicts: 'proceed', - query: { match_all: {} }, - refresh: true, - }); -} - -export async function getAllKbEntries(es: Client) { - const response = await es.search({ - index: resourceNames.indexPatterns.kb, - query: { match_all: {} }, + return observabilityAIAssistantAPIClient.admin({ + endpoint: 'POST /internal/observability_ai_assistant/kb/setup', + params: { + query: { inference_id: inferenceId }, + }, }); - return response.hits.hits; -} - -export async function deleteInferenceEndpoint({ - es, - name = AI_ASSISTANT_KB_INFERENCE_ID, -}: { - es: Client; - name?: 
string; -}) { - return es.inference.delete({ inference_id: name, force: true }); } export async function addSampleDocsToInternalKb( @@ -149,6 +76,7 @@ export async function addSampleDocsToInternalKb( sampleDocs: Array ) { const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + const es = getService('es'); await observabilityAIAssistantAPIClient.editor({ endpoint: 'POST /internal/observability_ai_assistant/kb/entries/import', @@ -158,6 +86,9 @@ export async function addSampleDocsToInternalKb( }, }, }); + + // refresh the index to make sure the documents are searchable + await es.indices.refresh({ index: resourceNames.indexPatterns.kb }); } export async function addSampleDocsToCustomIndex( @@ -176,7 +107,7 @@ export async function addSampleDocsToCustomIndex( mappings: { properties: { title: { type: 'text' }, - text: { type: 'semantic_text', inference_id: AI_ASSISTANT_KB_INFERENCE_ID }, + text: { type: 'semantic_text', inference_id: TINY_ELSER_INFERENCE_ID }, }, }, }); @@ -219,10 +150,16 @@ export async function deleteKbIndices(es: Client) { } export async function getConcreteWriteIndexFromAlias(es: Client) { - const response = await es.indices.getAlias({ index: resourceNames.aliases.kb }); - return Object.entries(response).find( - ([index, aliasInfo]) => aliasInfo.aliases[resourceNames.aliases.kb]?.is_write_index + const response = await es.indices.getAlias({ index: resourceNames.writeIndexAlias.kb }); + const writeIndex = Object.entries(response).find( + ([index, aliasInfo]) => aliasInfo.aliases[resourceNames.writeIndexAlias.kb]?.is_write_index )?.[0]; + + if (!writeIndex) { + throw new Error(`Could not find write index for alias ${resourceNames.writeIndexAlias.kb}`); + } + + return writeIndex; } export async function hasIndexWriteBlock(es: Client, index: string) { @@ -230,3 +167,79 @@ export async function hasIndexWriteBlock(es: Client, index: string) { const writeBlockSetting = 
Object.values(response)[0]?.settings?.index?.blocks?.write; return writeBlockSetting === 'true' || writeBlockSetting === true; } + +export async function getKbIndexCreatedVersion(es: Client) { + const indexSettings = await es.indices.getSettings({ + index: resourceNames.writeIndexAlias.kb, + human: true, + }); + + const { settings } = Object.values(indexSettings)[0]; + const createdVersion = settings?.index?.version?.created_string; + if (!createdVersion) { + throw new Error(`Could not find created version for index ${resourceNames.writeIndexAlias.kb}`); + } + return createdVersion; +} + +export async function reIndexKnowledgeBase( + observabilityAIAssistantAPIClient: ObservabilityAIAssistantApiClient +) { + return observabilityAIAssistantAPIClient.admin({ + endpoint: 'POST /internal/observability_ai_assistant/kb/reindex', + params: { + query: { + inference_id: TINY_ELSER_INFERENCE_ID, + }, + }, + }); +} + +interface SemanticTextField { + semantic_text: string; + _inference_fields: { + semantic_text: { + inference: { + inference_id: string; + chunks: { + semantic_text: Array<{ + embeddings: + | Record // sparse embedding + | number[]; // dense embedding; + }>; + }; + }; + }; + }; +} + +export async function getKnowledgeBaseEntriesFromEs(es: Client) { + const res = await es.search({ + size: 1000, + index: resourceNames.writeIndexAlias.kb, + // Add fields parameter to include inference metadata + fields: ['_inference_fields'], + query: { + match_all: {}, + }, + }); + + return res.hits.hits; +} + +export function getKnowledgeBaseEntriesFromApi({ + observabilityAIAssistantAPIClient, + query = '', + sortBy = 'title', + sortDirection = 'asc', +}: { + observabilityAIAssistantAPIClient: ObservabilityAIAssistantApiClient; + query?: string; + sortBy?: string; + sortDirection?: 'asc' | 'desc'; +}) { + return observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/entries', + params: { query: { query, sortBy, sortDirection } }, + }); 
+} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/model_and_inference.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/model_and_inference.ts new file mode 100644 index 0000000000000..9dcbc1998c6af --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/model_and_inference.ts @@ -0,0 +1,198 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { Client } from '@elastic/elasticsearch'; +import { ToolingLog } from '@kbn/tooling-log'; +import { InferenceTaskType } from '@elastic/elasticsearch/lib/api/types'; +import { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; +import { MachineLearningProvider } from '../../../../../services/ml'; +import { SUPPORTED_TRAINED_MODELS } from '../../../../../../functional/services/ml/api'; +import { setupKnowledgeBase, waitForKnowledgeBaseReady } from './knowledge_base'; + +export const LEGACY_CUSTOM_INFERENCE_ID = 'obs_ai_assistant_kb_inference'; + +// tiny models +export const TINY_ELSER_MODEL_ID = SUPPORTED_TRAINED_MODELS.TINY_ELSER.name; +export const TINY_TEXT_EMBEDDING_MODEL_ID = SUPPORTED_TRAINED_MODELS.TINY_TEXT_EMBEDDING.name; + +// tiny inference endpoints +export const TINY_ELSER_INFERENCE_ID = 'pt_tiny_elser_inference_id'; +export const TINY_TEXT_EMBEDDING_INFERENCE_ID = 'pt_tiny_text_embedding_inference_id'; + +export async function importModel( + ml: ReturnType, + { + modelId, + }: { + modelId: typeof TINY_ELSER_MODEL_ID | typeof TINY_TEXT_EMBEDDING_MODEL_ID; + } +) { + const config = ml.api.getTrainedModelConfig(modelId); + await ml.api.assureMlStatsIndexExists(); + await ml.api.importTrainedModel(modelId, modelId, config); +} + 
+export async function setupTinyElserModelAndInferenceEndpoint( + getService: DeploymentAgnosticFtrProviderContext['getService'] +) { + const ml = getService('ml'); + + await importModel(ml, { modelId: TINY_ELSER_MODEL_ID }); + await createTinyElserInferenceEndpoint(getService, { inferenceId: TINY_ELSER_INFERENCE_ID }); +} + +export async function teardownTinyElserModelAndInferenceEndpoint( + getService: DeploymentAgnosticFtrProviderContext['getService'] +) { + await deleteModel(getService, { modelId: TINY_ELSER_MODEL_ID }); + await deleteInferenceEndpoint(getService, { inferenceId: TINY_ELSER_INFERENCE_ID }); +} + +export function createTinyElserInferenceEndpoint( + getService: DeploymentAgnosticFtrProviderContext['getService'], + { inferenceId }: { inferenceId: string } +) { + const es = getService('es'); + const log = getService('log'); + + return createInferenceEndpoint({ + es, + log, + modelId: TINY_ELSER_MODEL_ID, + inferenceId, + taskType: 'sparse_embedding', + }); +} + +export function createTinyTextEmbeddingInferenceEndpoint( + getService: DeploymentAgnosticFtrProviderContext['getService'], + { inferenceId }: { inferenceId: string } +) { + const es = getService('es'); + const log = getService('log'); + + return createInferenceEndpoint({ + es, + log, + modelId: TINY_TEXT_EMBEDDING_MODEL_ID, + inferenceId, + taskType: 'text_embedding', + }); +} + +export async function deployTinyElserAndSetupKb( + getService: DeploymentAgnosticFtrProviderContext['getService'] +) { + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + + await setupTinyElserModelAndInferenceEndpoint(getService); + const { status, body } = await setupKnowledgeBase( + observabilityAIAssistantAPIClient, + TINY_ELSER_INFERENCE_ID + ); + await waitForKnowledgeBaseReady(getService); + + return { status, body }; +} + +export async function deleteInferenceEndpoint( + getService: DeploymentAgnosticFtrProviderContext['getService'], + { + inferenceId, + }: { + 
inferenceId: string; + } +) { + const es = getService('es'); + const log = getService('log'); + + try { + await es.inference.delete({ inference_id: inferenceId, force: true }); + log.info(`Inference endpoint "${inferenceId}" deleted.`); + } catch (e) { + if (e.message.includes('resource_not_found_exception')) { + log.debug(`Inference endpoint "${inferenceId}" was already deleted.`); + } else { + log.error(`Could not delete inference endpoint "${inferenceId}": ${e}`); + } + } +} + +export async function createInferenceEndpoint({ + es, + log, + inferenceId, + modelId, + taskType, +}: { + es: Client; + log: ToolingLog; + inferenceId: string; + modelId: string; + taskType?: InferenceTaskType; +}) { + try { + const res = await es.inference.put({ + inference_id: inferenceId, + task_type: taskType, + inference_config: { + service: 'elasticsearch', + service_settings: { + model_id: modelId, + adaptive_allocations: { enabled: true, min_number_of_allocations: 1 }, + num_threads: 1, + }, + task_settings: {}, + }, + }); + + log.info(`Inference endpoint ${inferenceId} created.`); + return res; + } catch (e) { + log.error(`Error creating inference endpoint "${inferenceId}": ${e}`); + throw e; + } +} + +export async function deleteModel( + getService: DeploymentAgnosticFtrProviderContext['getService'], + { + modelId, + }: { + modelId: typeof TINY_ELSER_MODEL_ID | typeof TINY_TEXT_EMBEDDING_MODEL_ID; + } +) { + const log = getService('log'); + const ml = getService('ml'); + + try { + await ml.api.stopTrainedModelDeploymentES(modelId, true); + await ml.api.deleteTrainedModelES(modelId); + await ml.testResources.cleanMLSavedObjects(); + log.info(`Knowledge base model deleted.`); + } catch (e) { + if (e.message.includes('resource_not_found_exception')) { + log.debug(`Knowledge base model was already deleted.`); + } else { + log.error(`Could not delete knowledge base model: ${e}`); + } + } +} + +export async function stopTinyElserModel( + getService: 
DeploymentAgnosticFtrProviderContext['getService'] +) { + const log = getService('log'); + const ml = getService('ml'); + + try { + await ml.api.stopTrainedModelDeploymentES(TINY_ELSER_INFERENCE_ID, true); + log.info(`Knowledge base model (${TINY_ELSER_MODEL_ID}) stopped.`); + } catch (e) { + log.error(`Could not stop knowledge base model (${TINY_ELSER_MODEL_ID}): ${e}`); + } +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/snapshots.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/snapshots.ts new file mode 100644 index 0000000000000..448cdcc8561e0 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/snapshots.ts @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { Client } from '@elastic/elasticsearch'; +import { resourceNames } from '@kbn/observability-ai-assistant-plugin/server/service'; +import { ToolingLog } from '@kbn/tooling-log'; +import path from 'path'; +import { AI_ASSISTANT_SNAPSHOT_REPO_PATH } from '../../../../default_configs/stateful.config.base'; + +export async function restoreKbSnapshot({ + log, + es, + snapshotFolderName, + snapshotName, +}: { + log: ToolingLog; + es: Client; + snapshotFolderName: string; + snapshotName: string; +}) { + const snapshotLocation = path.join(AI_ASSISTANT_SNAPSHOT_REPO_PATH, snapshotFolderName); + + const snapshotRepoName = `my_repo_${snapshotFolderName}`; + log.debug(`Creating snapshot repository "${snapshotRepoName}" from "${snapshotLocation}"`); + await es.snapshot.createRepository({ + name: snapshotFolderName, + repository: { + type: 'fs', + settings: { location: snapshotLocation }, + }, + }); + + try { + log.debug(`Restoring snapshot of "${resourceNames.concreteWriteIndexName.kb}"`); + await es.snapshot.restore({ + repository: snapshotFolderName, + snapshot: snapshotName, + wait_for_completion: true, + indices: resourceNames.concreteWriteIndexName.kb, + }); + } catch (error) { + log.error(`Error restoring snapshot: ${error.message}`); + throw error; + } finally { + log.debug(`Deleting snapshot repository "${snapshotFolderName}"`); + await es.snapshot.deleteRepository({ name: snapshotFolderName }); + } +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/tasks.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/tasks.ts new file mode 100644 index 0000000000000..17dfd6dc37457 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/tasks.ts @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { Client } from '@elastic/elasticsearch'; +import { getActiveReindexingTaskId } from '@kbn/observability-ai-assistant-plugin/server/service/knowledge_base_service/reindex_knowledge_base'; +import pRetry from 'p-retry'; + +export async function waitForIndexTaskToComplete(es: Client) { + await pRetry( + async () => { + const taskId = await getActiveReindexingTaskId({ asInternalUser: es }); + if (!taskId) { + throw new Error('Waiting for reindexing task to start'); + } + }, + { retries: 50, factor: 1, minTimeout: 500 } + ); + + await pRetry( + async () => { + const taskId = await getActiveReindexingTaskId({ asInternalUser: es }); + if (taskId) { + throw new Error('Waiting for reindexing task to complete'); + } + }, + { retries: 10, factor: 1, minTimeout: 500 } + ); +} diff --git a/x-pack/test/api_integration/deployment_agnostic/default_configs/stateful.config.base.ts b/x-pack/test/api_integration/deployment_agnostic/default_configs/stateful.config.base.ts index eea897650bbe8..eaaf1bf91202d 100644 --- a/x-pack/test/api_integration/deployment_agnostic/default_configs/stateful.config.base.ts +++ b/x-pack/test/api_integration/deployment_agnostic/default_configs/stateful.config.base.ts @@ -37,7 +37,7 @@ interface CreateTestConfigOptions { export const AI_ASSISTANT_SNAPSHOT_REPO_PATH = path.resolve( REPO_ROOT, - 'x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshot_kb_8.10' + 'x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots/' ); export function createStatefulTestConfig( diff --git a/x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15/data.json.gz b/x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15/data.json.gz deleted file mode 100644 index ac64be04d3a23..0000000000000 
Binary files a/x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15/data.json.gz and /dev/null differ diff --git a/x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15/mappings.json b/x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15/mappings.json deleted file mode 100644 index 68501ded3d887..0000000000000 --- a/x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15/mappings.json +++ /dev/null @@ -1,102 +0,0 @@ -{ - "type": "index", - "value": { - "aliases": { - ".kibana-observability-ai-assistant-kb": { - "is_write_index": true - } - }, - "index": ".kibana-observability-ai-assistant-kb-000001", - "mappings": { - "dynamic": "false", - "properties": { - "@timestamp": { - "type": "date" - }, - "confidence": { - "ignore_above": 1024, - "type": "keyword" - }, - "conversation": { - "properties": { - "id": { - "ignore_above": 1024, - "type": "keyword" - }, - "last_updated": { - "type": "date" - }, - "title": { - "type": "text" - } - } - }, - "doc_id": { - "fielddata": true, - "type": "text" - }, - "id": { - "ignore_above": 1024, - "type": "keyword" - }, - "is_correction": { - "type": "boolean" - }, - "labels": { - "dynamic": "true", - "type": "object" - }, - "ml": { - "properties": { - "tokens": { - "type": "rank_features" - } - } - }, - "namespace": { - "ignore_above": 1024, - "type": "keyword" - }, - "public": { - "type": "boolean" - }, - "text": { - "type": "text" - }, - "title": { - "fields": { - "keyword": { - "ignore_above": 256, - "type": "keyword" - } - }, - "type": "text" - }, - "type": { - "ignore_above": 1024, - "type": "keyword" - }, - "user": { - "properties": { - "id": { - "ignore_above": 1024, - "type": "keyword" - }, - "name": { - "ignore_above": 1024, - "type": "keyword" - } - } - } - } - }, - "settings": { - "index": { - "auto_expand_replicas": "0-1", - "hidden": "true", - "number_of_replicas": "0", - "number_of_shards": "1" - } - } - } 
-} diff --git a/x-pack/test/functional/services/ml/api.ts b/x-pack/test/functional/services/ml/api.ts index 3d2d1004528d0..81ac1c269e7ea 100644 --- a/x-pack/test/functional/services/ml/api.ts +++ b/x-pack/test/functional/services/ml/api.ts @@ -1595,6 +1595,16 @@ export function MachineLearningAPIProvider({ getService }: FtrProviderContext) { log.debug('> Trained model deleted'); }, + async startTrainedModelDeploymentES(modelId: string) { + log.debug(`Starting trained model deployment with id "${modelId}"`); + const url = `/_ml/trained_models/${modelId}/deployment/_start`; + + const { body, status } = await esSupertest.post(url); + this.assertResponseStatusCode(200, status, body); + + log.debug('> Trained model deployment started'); + }, + async deleteAllTrainedModelsES() { log.debug(`Deleting all trained models`); const getModelsRsp = await this.getTrainedModelsES(); diff --git a/x-pack/test/observability_ai_assistant_api_integration/common/create_llm_proxy.ts b/x-pack/test/observability_ai_assistant_api_integration/common/create_llm_proxy.ts index 5dbda8680d0e9..25b53c226cdcd 100644 --- a/x-pack/test/observability_ai_assistant_api_integration/common/create_llm_proxy.ts +++ b/x-pack/test/observability_ai_assistant_api_integration/common/create_llm_proxy.ts @@ -14,6 +14,8 @@ import { TITLE_CONVERSATION_FUNCTION_NAME } from '@kbn/observability-ai-assistan import pRetry from 'p-retry'; import type { ChatCompletionChunkToolCall } from '@kbn/inference-common'; import { ChatCompletionStreamParams } from 'openai/lib/ChatCompletionStream'; +import { SCORE_FUNCTION_NAME } from '@kbn/observability-ai-assistant-plugin/server/utils/recall/score_suggestions'; +import { SELECT_RELEVANT_FIELDS_NAME } from '@kbn/observability-ai-assistant-plugin/server/functions/get_dataset_info/get_relevant_field_names'; import { createOpenAiChunk } from './create_openai_chunk'; type Request = http.IncomingMessage; @@ -90,11 +92,11 @@ export class LlmProxy { const errorMessage = `No interceptors 
found to handle request: ${request.method} ${request.url}`; const availableInterceptorNames = this.interceptors.map(({ name }) => name); - this.log.error( + this.log.warning( `Available interceptors: ${JSON.stringify(availableInterceptorNames, null, 2)}` ); - this.log.error( + this.log.warning( `${errorMessage}. Messages: ${JSON.stringify(requestBody.messages, null, 2)}` ); response.writeHead(500, { @@ -122,6 +124,7 @@ export class LlmProxy { this.log.debug(`Closing LLM Proxy on port ${this.port}`); clearInterval(this.interval); this.server.close(); + this.clear(); } waitForAllInterceptorsToHaveBeenCalled() { @@ -138,7 +141,9 @@ export class LlmProxy { )}` ); if (this.interceptors.length > 0) { - throw new Error(`Interceptors were not called: ${unsettledInterceptors}`); + throw new Error( + `Interceptors were not called: ${unsettledInterceptors.map((name) => `\n - ${name}`)}` + ); } }, { retries: 5, maxTimeout: 1000 } @@ -148,8 +153,8 @@ export class LlmProxy { }); } - interceptConversation( - msg: LLMMessage, + interceptWithResponse( + msg: string | string[], { name, }: { @@ -157,7 +162,9 @@ export class LlmProxy { } = {} ) { return this.intercept( - `Conversation interceptor: "${name ?? 'Unnamed'}"`, + `interceptWithResponse: "${ + name ?? isString(msg) ? 
msg.slice(0, 80) : `${msg.length} chunks` + }"`, // @ts-expect-error (body) => body.tool_choice?.function?.name === undefined, msg @@ -165,30 +172,36 @@ export class LlmProxy { } interceptWithFunctionRequest({ - name: name, + name, arguments: argumentsCallback, - when, + when = () => true, + interceptorName, }: { name: string; arguments: (body: ChatCompletionStreamParams) => string; - when: RequestInterceptor['when']; + when?: RequestInterceptor['when']; + interceptorName?: string; }) { - // @ts-expect-error - return this.intercept(`Function request interceptor: "${name}"`, when, (body) => { - return { - content: '', - tool_calls: [ - { - function: { - name, - arguments: argumentsCallback(body), + return this.intercept( + interceptorName ?? `interceptWithFunctionRequest: "${name}"`, + when, + // @ts-expect-error + (body) => { + return { + content: '', + tool_calls: [ + { + function: { + name, + arguments: argumentsCallback(body), + }, + index: 0, + id: `call_${uuidv4()}`, }, - index: 0, - id: `call_${uuidv4()}`, - }, - ], - }; - }).completeAfterIntercept(); + ], + }; + } + ).completeAfterIntercept(); } interceptSelectRelevantFieldsToolChoice({ @@ -197,9 +210,10 @@ export class LlmProxy { }: { from?: number; to?: number } = {}) { let relevantFields: RelevantField[] = []; const simulator = this.interceptWithFunctionRequest({ - name: 'select_relevant_fields', - // @ts-expect-error - when: (requestBody) => requestBody.tool_choice?.function?.name === 'select_relevant_fields', + name: SELECT_RELEVANT_FIELDS_NAME, + when: (requestBody) => + // @ts-expect-error + requestBody.tool_choice?.function?.name === SELECT_RELEVANT_FIELDS_NAME, arguments: (requestBody) => { const messageWithFieldIds = last(requestBody.messages); const matches = (messageWithFieldIds?.content as string).match(/\{[\s\S]*?\}/g)!; @@ -224,11 +238,13 @@ export class LlmProxy { let documents: KnowledgeBaseDocument[] = []; const simulator = this.interceptWithFunctionRequest({ - name: 'score', + name: 
SCORE_FUNCTION_NAME, // @ts-expect-error - when: (requestBody) => requestBody.tool_choice?.function?.name === 'score', + when: (requestBody) => requestBody.tool_choice?.function?.name === SCORE_FUNCTION_NAME, arguments: (requestBody) => { - documents = extractDocumentsFromMessage(last(requestBody.messages)?.content as string, log); + const lastMessage = last(requestBody.messages)?.content as string; + log.debug(`interceptScoreToolChoice: ${lastMessage}`); + documents = extractDocumentsFromMessage(lastMessage, log); const scores = documents.map((doc: KnowledgeBaseDocument) => `${doc.id},7`).join(';'); return JSON.stringify({ scores }); @@ -247,6 +263,7 @@ export class LlmProxy { interceptTitle(title: string) { return this.interceptWithFunctionRequest({ name: TITLE_CONVERSATION_FUNCTION_NAME, + interceptorName: `Title: "${title}"`, arguments: () => JSON.stringify({ title }), // @ts-expect-error when: (body) => body.tool_choice?.function?.name === TITLE_CONVERSATION_FUNCTION_NAME, @@ -278,7 +295,7 @@ export class LlmProxy { requestBody, status: once((status: number) => { response.writeHead(status, { - 'Elastic-Interceptor': name, + 'Elastic-Interceptor': name.replace(/[^\x20-\x7E]/g, ' '), // Keeps only alphanumeric characters and spaces 'Content-Type': 'text/event-stream', 'Cache-Control': 'no-cache', Connection: 'keep-alive', diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/helpers.ts b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/helpers.ts deleted file mode 100644 index 2d7acb7fd485e..0000000000000 --- a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/helpers.ts +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import { Client } from '@elastic/elasticsearch'; -import { AI_ASSISTANT_KB_INFERENCE_ID } from '@kbn/observability-ai-assistant-plugin/server/service/inference_endpoint'; -import { MachineLearningProvider } from '../../../api_integration/services/ml'; -import { SUPPORTED_TRAINED_MODELS } from '../../../functional/services/ml/api'; - -export const TINY_ELSER = { - ...SUPPORTED_TRAINED_MODELS.TINY_ELSER, - id: SUPPORTED_TRAINED_MODELS.TINY_ELSER.name, -}; - -export async function importTinyElserModel(ml: ReturnType) { - const config = { - ...ml.api.getTrainedModelConfig(TINY_ELSER.name), - input: { - field_names: ['text_field'], - }, - }; - // necessary for MKI, check indices before importing model. compatible with stateful - await ml.api.assureMlStatsIndexExists(); - await ml.api.importTrainedModel(TINY_ELSER.name, TINY_ELSER.id, config); -} - -export async function deleteKnowledgeBaseModel(ml: ReturnType) { - await ml.api.stopTrainedModelDeploymentES(TINY_ELSER.id, true); - await ml.api.deleteTrainedModelES(TINY_ELSER.id); - await ml.testResources.cleanMLSavedObjects(); -} - -export async function clearKnowledgeBase(es: Client) { - const KB_INDEX = '.kibana-observability-ai-assistant-kb-*'; - - return es.deleteByQuery({ - index: KB_INDEX, - conflicts: 'proceed', - query: { match_all: {} }, - refresh: true, - }); -} - -export async function deleteInferenceEndpoint({ - es, - name = AI_ASSISTANT_KB_INFERENCE_ID, -}: { - es: Client; - name?: string; -}) { - return es.inference.delete({ inference_id: name, force: true }); -} diff --git a/x-pack/test/observability_ai_assistant_functional/common/config.ts b/x-pack/test/observability_ai_assistant_functional/common/config.ts index 2396129e0b0ff..eb267123adf2b 100644 --- a/x-pack/test/observability_ai_assistant_functional/common/config.ts +++ b/x-pack/test/observability_ai_assistant_functional/common/config.ts @@ -60,7 +60,7 @@ async function getTestConfig({ services: { observabilityAIAssistantUI: (context: 
InheritedFtrProviderContext) => ObservabilityAIAssistantUIProvider(context), - observabilityAIAssistantAPIClient: async () => { + observabilityAIAssistantApi: async () => { return { admin: getScopedApiClient(kibanaServer, 'elastic'), viewer: getScopedApiClient(kibanaServer, viewer.username), diff --git a/x-pack/test/observability_ai_assistant_functional/common/connectors.ts b/x-pack/test/observability_ai_assistant_functional/common/connectors.ts index 0930c1e4ff7c4..fc06a33cd0d72 100644 --- a/x-pack/test/observability_ai_assistant_functional/common/connectors.ts +++ b/x-pack/test/observability_ai_assistant_functional/common/connectors.ts @@ -7,6 +7,7 @@ import { Agent as SuperTestAgent } from 'supertest'; import { LlmProxy } from '../../observability_ai_assistant_api_integration/common/create_llm_proxy'; + export async function createConnector(proxy: LlmProxy, supertest: SuperTestAgent) { await supertest .post('/api/actions/connector') diff --git a/x-pack/test/observability_ai_assistant_functional/common/conversations.ts b/x-pack/test/observability_ai_assistant_functional/common/conversations.ts index 39f64fd65811c..6a2c854fd8aee 100644 --- a/x-pack/test/observability_ai_assistant_functional/common/conversations.ts +++ b/x-pack/test/observability_ai_assistant_functional/common/conversations.ts @@ -8,7 +8,7 @@ import { FtrProviderContext } from '../ftr_provider_context'; export async function deleteConversations(getService: FtrProviderContext['getService']) { - const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); const response = await observabilityAIAssistantAPIClient.editor({ endpoint: 'POST /internal/observability_ai_assistant/conversations', diff --git a/x-pack/test/observability_ai_assistant_functional/common/ui/index.ts b/x-pack/test/observability_ai_assistant_functional/common/ui/index.ts index 3a12e17860254..3fa265cfa5489 100644 --- 
a/x-pack/test/observability_ai_assistant_functional/common/ui/index.ts +++ b/x-pack/test/observability_ai_assistant_functional/common/ui/index.ts @@ -36,7 +36,11 @@ const pages = { setupGenAiConnectorsButtonSelector: 'observabilityAiAssistantInitialSetupPanelSetUpGenerativeAiConnectorButton', chatInput: 'observabilityAiAssistantChatPromptEditorTextArea', - retryButton: 'observabilityAiAssistantWelcomeMessageSetUpKnowledgeBaseButton', + installKnowledgeBaseButton: 'observabilityAiAssistantWelcomeMessageSetUpKnowledgeBaseButton', + settingUpKnowledgeBase: 'observabilityAiAssistantWelcomeMessageSettingUpKnowledgeBaseText', + selectModelDropdown: 'observabilityAiAssistantKnowledgeBaseModelDropdown', + pendingModelText: 'observabilityAiAssistantKnowledgeBaseModelPendingText', + redeployModelButton: 'observabilityAiAssistantKnowledgeBaseReDeployModelButton', conversationLink: 'observabilityAiAssistantConversationsLink', positiveFeedbackButton: 'observabilityAiAssistantFeedbackButtonsPositiveButton', connectorsErrorMsg: 'observabilityAiAssistantConnectorsError', diff --git a/x-pack/test/observability_ai_assistant_functional/tests/contextual_insights/index.spec.ts b/x-pack/test/observability_ai_assistant_functional/tests/contextual_insights/index.spec.ts index 76249480ffa2d..ec63e49b73c2d 100644 --- a/x-pack/test/observability_ai_assistant_functional/tests/contextual_insights/index.spec.ts +++ b/x-pack/test/observability_ai_assistant_functional/tests/contextual_insights/index.spec.ts @@ -122,7 +122,7 @@ export default function ApiTest({ getService, getPageObjects }: FtrProviderConte it('should show the contextual insight component on the APM error details page', async () => { await navigateToError(); - void proxy.interceptConversation('This error is nothing to worry about. Have a nice day!'); + void proxy.interceptWithResponse('This error is nothing to worry about. 
Have a nice day!'); await openContextualInsights(); diff --git a/x-pack/test/observability_ai_assistant_functional/tests/conversations/archiving.spec.ts b/x-pack/test/observability_ai_assistant_functional/tests/conversations/archiving.spec.ts index 50facf485697a..5be4b085fe9f2 100644 --- a/x-pack/test/observability_ai_assistant_functional/tests/conversations/archiving.spec.ts +++ b/x-pack/test/observability_ai_assistant_functional/tests/conversations/archiving.spec.ts @@ -16,7 +16,7 @@ import { deleteConversations } from '../../common/conversations'; import { interceptRequest } from '../../common/intercept_request'; export default function ApiTest({ getService, getPageObjects }: FtrProviderContext) { - const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); const ui = getService('observabilityAIAssistantUI'); const testSubjects = getService('testSubjects'); const supertest = getService('supertest'); @@ -38,7 +38,7 @@ export default function ApiTest({ getService, getPageObjects }: FtrProviderConte }); void proxy.interceptTitle(expectedTitle); - void proxy.interceptConversation(expectedResponse); + void proxy.interceptWithResponse(expectedResponse); await testSubjects.setValue(ui.pages.conversations.chatInput, 'Hello'); await testSubjects.pressEnter(ui.pages.conversations.chatInput); diff --git a/x-pack/test/observability_ai_assistant_functional/tests/conversations/index.spec.ts b/x-pack/test/observability_ai_assistant_functional/tests/conversations/index.spec.ts index 5ca02f8607335..89b47023fee06 100644 --- a/x-pack/test/observability_ai_assistant_functional/tests/conversations/index.spec.ts +++ b/x-pack/test/observability_ai_assistant_functional/tests/conversations/index.spec.ts @@ -16,7 +16,6 @@ import { createLlmProxy, LlmProxy, } from '../../../observability_ai_assistant_api_integration/common/create_llm_proxy'; -import { interceptRequest } 
from '../../common/intercept_request'; import { FtrProviderContext } from '../../ftr_provider_context'; import { editor } from '../../../observability_ai_assistant_api_integration/common/users/users'; @@ -24,7 +23,7 @@ import { deleteConnectors } from '../../common/connectors'; import { deleteConversations } from '../../common/conversations'; export default function ApiTest({ getService, getPageObjects }: FtrProviderContext) { - const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); const ui = getService('observabilityAIAssistantUI'); const testSubjects = getService('testSubjects'); const browser = getService('browser'); @@ -33,13 +32,8 @@ export default function ApiTest({ getService, getPageObjects }: FtrProviderConte const retry = getService('retry'); const log = getService('log'); const telemetry = getService('kibana_ebt_ui'); - - const driver = getService('__webdriver__'); - const toasts = getService('toasts'); - const { header } = getPageObjects(['header', 'security']); - const flyoutService = getService('flyout'); async function login(username: string, password: string | undefined) { @@ -166,18 +160,7 @@ export default function ApiTest({ getService, getPageObjects }: FtrProviderConte ); await testSubjects.setValue(ui.pages.createConnectorFlyout.apiKeyInput, 'myApiKey'); - // intercept the request to set up the knowledge base, - // so we don't have to wait until it's fully downloaded - await interceptRequest( - driver.driver, - '*kb\\/setup*', - (responseFactory) => { - return responseFactory.fail(); - }, - async () => { - await testSubjects.clickWhenNotDisabled(ui.pages.createConnectorFlyout.saveButton); - } - ); + await testSubjects.clickWhenNotDisabled(ui.pages.createConnectorFlyout.saveButton); await retry.waitFor('Connector created toast', async () => { const count = await toasts.getCount(); @@ -201,7 +184,7 @@ export default function 
ApiTest({ getService, getPageObjects }: FtrProviderConte }); it('shows a setup kb button', async () => { - await testSubjects.existOrFail(ui.pages.conversations.retryButton); + await testSubjects.existOrFail(ui.pages.conversations.installKnowledgeBaseButton); }); it('has an input field enabled', async () => { @@ -215,7 +198,7 @@ export default function ApiTest({ getService, getPageObjects }: FtrProviderConte before(async () => { void proxy.interceptTitle(expectedTitle); - void proxy.interceptConversation(expectedResponse); + void proxy.interceptWithResponse(expectedResponse); await testSubjects.setValue(ui.pages.conversations.chatInput, 'hello'); await testSubjects.pressEnter(ui.pages.conversations.chatInput); @@ -287,7 +270,7 @@ export default function ApiTest({ getService, getPageObjects }: FtrProviderConte describe('and adding another prompt', () => { before(async () => { - void proxy.interceptConversation('My second response'); + void proxy.interceptWithResponse('My second response'); await testSubjects.setValue(ui.pages.conversations.chatInput, 'hello'); await testSubjects.pressEnter(ui.pages.conversations.chatInput); @@ -371,7 +354,7 @@ export default function ApiTest({ getService, getPageObjects }: FtrProviderConte describe('and sending another prompt', () => { before(async () => { - void proxy.interceptConversation( + void proxy.interceptWithResponse( 'Service Level Indicators (SLIs) are quantifiable defined metrics that measure the performance and availability of a service or distributed system.' 
); diff --git a/x-pack/test/observability_ai_assistant_functional/tests/conversations/sharing.spec.ts b/x-pack/test/observability_ai_assistant_functional/tests/conversations/sharing.spec.ts index 01b17f691f573..ac72fcec28ad1 100644 --- a/x-pack/test/observability_ai_assistant_functional/tests/conversations/sharing.spec.ts +++ b/x-pack/test/observability_ai_assistant_functional/tests/conversations/sharing.spec.ts @@ -16,7 +16,7 @@ import { deleteConversations } from '../../common/conversations'; import { interceptRequest } from '../../common/intercept_request'; export default function ApiTest({ getService, getPageObjects }: FtrProviderContext) { - const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); const ui = getService('observabilityAIAssistantUI'); const testSubjects = getService('testSubjects'); const supertest = getService('supertest'); @@ -38,7 +38,7 @@ export default function ApiTest({ getService, getPageObjects }: FtrProviderConte }); void proxy.interceptTitle(expectedTitle); - void proxy.interceptConversation(expectedResponse); + void proxy.interceptWithResponse(expectedResponse); await testSubjects.setValue(ui.pages.conversations.chatInput, 'Hello'); await testSubjects.pressEnter(ui.pages.conversations.chatInput); diff --git a/x-pack/test/observability_ai_assistant_functional/tests/knowledge_base/index.spec.ts b/x-pack/test/observability_ai_assistant_functional/tests/knowledge_base/index.spec.ts new file mode 100644 index 0000000000000..c3c8e068a479d --- /dev/null +++ b/x-pack/test/observability_ai_assistant_functional/tests/knowledge_base/index.spec.ts @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import expect from '@kbn/expect'; +import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/common'; +import { + LlmProxy, + createLlmProxy, +} from '../../../observability_ai_assistant_api_integration/common/create_llm_proxy'; +import { FtrProviderContext } from '../../ftr_provider_context'; +import { + deployTinyElserAndSetupKb, + stopTinyElserModel, + teardownTinyElserModelAndInferenceEndpoint, +} from '../../../api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/model_and_inference'; +import { clearKnowledgeBase } from '../../../api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/knowledge_base'; +import { createConnector, deleteConnectors } from '../../common/connectors'; + +export default function ApiTest({ getService }: FtrProviderContext) { + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + const ui = getService('observabilityAIAssistantUI'); + const testSubjects = getService('testSubjects'); + const retry = getService('retry'); + const log = getService('log'); + const es = getService('es'); + const supertest = getService('supertest'); + const browser = getService('browser'); + + describe('Knowledge Base', () => { + let proxy: LlmProxy; + + before(async () => { + proxy = await createLlmProxy(log); + + await clearKnowledgeBase(es); + await deleteConnectors(supertest); + await createConnector(proxy, supertest); + + await ui.auth.login('editor'); + await ui.router.goto('/conversations/new', { path: {}, query: {} }); + }); + + after(async () => { + await clearKnowledgeBase(es); + await teardownTinyElserModelAndInferenceEndpoint(getService); + await deleteConnectors(supertest); + + proxy.close(); + await ui.auth.logout(); + }); + + it('shows model dropdown and install button before the KB is installed', async () => { + await testSubjects.existOrFail(ui.pages.conversations.selectModelDropdown); + await 
testSubjects.existOrFail(ui.pages.conversations.installKnowledgeBaseButton); + }); + + it('should not show the setting up knowledge base loader after the KB is installed', async () => { + await deployTinyElserAndSetupKb(getService); + await browser.refresh(); + + await testSubjects.missingOrFail(ui.pages.conversations.settingUpKnowledgeBase); + }); + + it('should show a button to re-deploy the model if the model has been stopped', async () => { + await stopTinyElserModel(getService); + await browser.refresh(); + + await testSubjects.existOrFail(ui.pages.conversations.pendingModelText); + await testSubjects.existOrFail(ui.pages.conversations.redeployModelButton); + + await retry.try(async () => { + const response = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/status', + }); + + expect(response.body?.kbState).to.eql(KnowledgeBaseState.MODEL_PENDING_DEPLOYMENT); + }); + }); + + it('should show re-deploy is in progress when the re-deploy button is clicked', async () => { + await testSubjects.click(ui.pages.conversations.redeployModelButton); + + const deployingText = await testSubjects.getVisibleText( + ui.pages.conversations.pendingModelText + ); + expect(deployingText).to.contain('Re-deploying knowledge base model'); + }); + }); +} diff --git a/x-pack/test/observability_ai_assistant_functional/tests/knowledge_base_management/index.spec.ts b/x-pack/test/observability_ai_assistant_functional/tests/knowledge_base_management/index.spec.ts index 87ada1e65a754..71d9305febc9a 100644 --- a/x-pack/test/observability_ai_assistant_functional/tests/knowledge_base_management/index.spec.ts +++ b/x-pack/test/observability_ai_assistant_functional/tests/knowledge_base_management/index.spec.ts @@ -8,21 +8,18 @@ import expect from '@kbn/expect'; import { subj as testSubjSelector } from '@kbn/test-subj-selector'; import { - TINY_ELSER, - clearKnowledgeBase, - importTinyElserModel, - deleteInferenceEndpoint, - 
deleteKnowledgeBaseModel, -} from '../../../observability_ai_assistant_api_integration/tests/knowledge_base/helpers'; + deployTinyElserAndSetupKb, + teardownTinyElserModelAndInferenceEndpoint, +} from '../../../api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/model_and_inference'; +import { clearKnowledgeBase } from '../../../api_integration/deployment_agnostic/apis/observability/ai_assistant/utils/knowledge_base'; import { ObservabilityAIAssistantApiClient } from '../../../observability_ai_assistant_api_integration/common/observability_ai_assistant_api_client'; import { FtrProviderContext } from '../../ftr_provider_context'; export default function ApiTest({ getService, getPageObjects }: FtrProviderContext) { - const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); const ui = getService('observabilityAIAssistantUI'); const testSubjects = getService('testSubjects'); const log = getService('log'); - const ml = getService('ml'); const es = getService('es'); const { common } = getPageObjects(['common']); @@ -51,32 +48,13 @@ export default function ApiTest({ getService, getPageObjects }: FtrProviderConte describe('Knowledge management tab', () => { before(async () => { await clearKnowledgeBase(es); - - // create a knowledge base model - await importTinyElserModel(ml); - - await Promise.all([ - // setup the knowledge base - observabilityAIAssistantAPIClient - .admin({ - endpoint: 'POST /internal/observability_ai_assistant/kb/setup', - params: { - query: { - model_id: TINY_ELSER.id, - }, - }, - }) - .expect(200), - - // login as editor - ui.auth.login('editor'), - ]); + await deployTinyElserAndSetupKb(getService); + await ui.auth.login('editor'); }); after(async () => { await Promise.all([ - deleteKnowledgeBaseModel(ml), - deleteInferenceEndpoint({ es }), + teardownTinyElserModelAndInferenceEndpoint(getService), 
clearKnowledgeBase(es), ui.auth.logout(), ]);