Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
70 commits
Select commit Hold shift + click to select a range
f04ab4c
[AI Connector] Change completion subAction schema to be OpenAI compat…
YulNaumenko Nov 14, 2024
af94594
-
YulNaumenko Nov 21, 2024
697c73c
Merge branch 'main' into ai-connector-inference-completion-openai
YulNaumenko Dec 2, 2024
3f076c2
oh
stephmilovic Dec 4, 2024
299324b
more
stephmilovic Dec 4, 2024
7428e6f
Merge branch 'main' into ai-connector-inference-completion-openai
YulNaumenko Dec 5, 2024
f17c678
Merge branch 'ai-connector-inference-completion-openai' of github.com…
YulNaumenko Dec 5, 2024
d34a29b
fix for preconfig
stephmilovic Dec 5, 2024
beed706
rm log
stephmilovic Dec 5, 2024
75a1624
revert
stephmilovic Dec 5, 2024
b37f976
reverts
stephmilovic Dec 5, 2024
2847a11
added unified completion support
YulNaumenko Dec 6, 2024
3a03150
-
YulNaumenko Dec 6, 2024
f53b5bf
Merge branch 'main' into inference_assistant
stephmilovic Dec 10, 2024
4aed034
Merge branch 'inference_assistant' into inference_assistant_unified
stephmilovic Dec 10, 2024
f4253b6
wip
stephmilovic Dec 10, 2024
f14b3ec
Merge branch 'main' into ai-connector-inference-completion-openai
YulNaumenko Dec 10, 2024
bf34578
added dashboard and async iterator
YulNaumenko Dec 11, 2024
6172e5b
Merge remote-tracking branch 'yuliia/ai-connector-inference-completio…
stephmilovic Dec 11, 2024
f3b6a75
fixed headers
YulNaumenko Dec 12, 2024
6f34515
wip
stephmilovic Dec 12, 2024
72041b5
fixed params
YulNaumenko Dec 12, 2024
9cec4cb
Merge branch 'main' into ai-connector-inference-completion-openai
YulNaumenko Dec 12, 2024
5d28bab
Merge branch 'main' into inference_assistant_unified
stephmilovic Dec 12, 2024
6cd1d03
Merge remote-tracking branch 'yuliia/ai-connector-inference-completio…
stephmilovic Dec 12, 2024
79c5c3b
made the regular stream and non-stream working
YulNaumenko Dec 16, 2024
27512b5
Merge branch 'main' into ai-connector-inference-completion-openai
YulNaumenko Dec 16, 2024
8ce2707
Merge branch 'ai-connector-inference-completion-openai' of github.com…
YulNaumenko Dec 16, 2024
cc7993b
merge fix
YulNaumenko Dec 16, 2024
166a22b
Merge remote-tracking branch 'upstream/main' into ai-connector-infere…
YulNaumenko Dec 16, 2024
7cbb869
Merge remote-tracking branch 'yuliia/ai-connector-inference-completio…
stephmilovic Dec 16, 2024
4a39b10
update subactions
stephmilovic Dec 16, 2024
8cd3754
wip
stephmilovic Dec 16, 2024
e0ee923
tool calls fix
YulNaumenko Dec 16, 2024
8626913
Merge remote-tracking branch 'yuliia/ai-connector-inference-completio…
stephmilovic Dec 16, 2024
042e813
improved
YulNaumenko Dec 16, 2024
c1ce23f
Merge remote-tracking branch 'yuliia/ai-connector-inference-completio…
stephmilovic Dec 16, 2024
6ded610
-
YulNaumenko Dec 16, 2024
2796a63
Merge remote-tracking branch 'yuliia/ai-connector-inference-completio…
stephmilovic Dec 16, 2024
56584d8
fixes and rm logs
stephmilovic Dec 16, 2024
36a6f67
streaming
YulNaumenko Dec 17, 2024
a70051b
fixed test
YulNaumenko Dec 17, 2024
f811503
fixed streaming
YulNaumenko Dec 17, 2024
c0d89c4
Merge remote-tracking branch 'yuliia/ai-connector-inference-completio…
stephmilovic Dec 17, 2024
2d2ebde
Merge remote-tracking branch 'upstream/main' into ai-connector-infere…
YulNaumenko Dec 17, 2024
232fbe9
fixed due to comments
YulNaumenko Dec 17, 2024
d44c560
Merge remote-tracking branch 'yuliia/ai-connector-inference-completio…
stephmilovic Dec 17, 2024
ed6107b
excluded n
YulNaumenko Dec 17, 2024
cf744b7
AD
stephmilovic Dec 17, 2024
c564aa3
AD works, rm logs
stephmilovic Dec 17, 2024
e0b95ce
Merge remote-tracking branch 'yuliia/ai-connector-inference-completio…
stephmilovic Dec 17, 2024
e8b9beb
Merge branch 'main' into inference_assistant_unified
stephmilovic Dec 30, 2024
0b2dcdf
revert inference changes
stephmilovic Dec 30, 2024
395c2f2
reason
stephmilovic Jan 2, 2025
dbef0fb
fix openai schema
stephmilovic Jan 2, 2025
ef868bc
Merge branch 'main' into inference_assistant_unified
stephmilovic Jan 2, 2025
bf9d13c
preconfig only
stephmilovic Jan 3, 2025
11f5f73
better error handling
stephmilovic Jan 6, 2025
36dd8b1
inferenceEnabled flag added
stephmilovic Jan 6, 2025
6bcc2c2
unit tests
stephmilovic Jan 6, 2025
2fc3283
fix openai type
stephmilovic Jan 6, 2025
df4592e
Merge branch 'main' into inference_assistant_unified
elasticmachine Jan 6, 2025
8141bf6
revert FF
stephmilovic Jan 7, 2025
b46a611
functions only if defined
stephmilovic Jan 7, 2025
fe5113a
Merge branch 'main' into inference_assistant_unified
stephmilovic Jan 7, 2025
d13c3e3
better
stephmilovic Jan 7, 2025
fb8e398
fix llm.ts
stephmilovic Jan 7, 2025
2559028
Merge branch 'main' into inference_assistant_unified
elasticmachine Jan 7, 2025
c6e4e63
pr comments and more tests
stephmilovic Jan 7, 2025
4929580
Merge branch 'inference_assistant_unified' of github.com:stephmilovic…
stephmilovic Jan 7, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ export interface AssistantProviderProps {
children: React.ReactNode;
getComments: GetAssistantMessages;
http: HttpSetup;
inferenceEnabled?: boolean;
baseConversations: Record<string, Conversation>;
nameSpace?: string;
navigateToApp: (appId: string, options?: NavigateToAppOptions | undefined) => Promise<void>;
Expand Down Expand Up @@ -102,6 +103,7 @@ export interface UseAssistantContext {
currentUserAvatar?: UserAvatar;
getComments: GetAssistantMessages;
http: HttpSetup;
inferenceEnabled: boolean;
knowledgeBase: KnowledgeBaseConfig;
getLastConversationId: (conversationTitle?: string) => string;
promptContexts: Record<string, PromptContext>;
Expand Down Expand Up @@ -144,6 +146,7 @@ export const AssistantProvider: React.FC<AssistantProviderProps> = ({
children,
getComments,
http,
inferenceEnabled = false,
baseConversations,
navigateToApp,
nameSpace = DEFAULT_ASSISTANT_NAMESPACE,
Expand Down Expand Up @@ -276,6 +279,7 @@ export const AssistantProvider: React.FC<AssistantProviderProps> = ({
docLinks,
getComments,
http,
inferenceEnabled,
knowledgeBase: {
...DEFAULT_KNOWLEDGE_BASE_SETTINGS,
...localStorageKnowledgeBase,
Expand Down Expand Up @@ -317,6 +321,7 @@ export const AssistantProvider: React.FC<AssistantProviderProps> = ({
docLinks,
getComments,
http,
inferenceEnabled,
localStorageKnowledgeBase,
promptContexts,
navigateToApp,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -97,12 +97,10 @@ export const ConnectorSelector: React.FC<Props> = React.memo(
const connectorOptions = useMemo(
() =>
(aiConnectors ?? []).map((connector) => {
const connectorTypeTitle =
getGenAiConfig(connector)?.apiProvider ??
getActionTypeTitle(actionTypeRegistry.get(connector.actionTypeId));
const connectorDetails = connector.isPreconfigured
? i18n.PRECONFIGURED_CONNECTOR
: connectorTypeTitle;
: getGenAiConfig(connector)?.apiProvider ??
getActionTypeTitle(actionTypeRegistry.get(connector.actionTypeId));
const attackDiscoveryStats =
stats !== null
? stats.statsPerConnector.find((s) => s.connectorId === connector.id) ?? null
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ interface Props {
actionTypeSelectorInline: boolean;
}
const itemClassName = css`
inline-size: 220px;
inline-size: 150px;
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

While this PR does not include the inference connector in the action type selector modal, it will in the future. Leaving this design change in to accommodate 4 connector types in the future


.euiKeyPadMenuItem__label {
white-space: nowrap;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,10 +68,11 @@ export const getConnectorTypeTitle = (
if (!connector) {
return null;
}
const connectorTypeTitle =
getGenAiConfig(connector)?.apiProvider ??
getActionTypeTitle(actionTypeRegistry.get(connector.actionTypeId));
const actionType = connector.isPreconfigured ? PRECONFIGURED_CONNECTOR : connectorTypeTitle;

const actionType = connector.isPreconfigured
? PRECONFIGURED_CONNECTOR
: getGenAiConfig(connector)?.apiProvider ??
getActionTypeTitle(actionTypeRegistry.get(connector.actionTypeId));

return actionType;
};
Original file line number Diff line number Diff line change
Expand Up @@ -41,18 +41,12 @@ export const useLoadActionTypes = ({
featureId: GenerativeAIForSecurityConnectorFeatureId,
});

const actionTypeKey = {
bedrock: '.bedrock',
openai: '.gen-ai',
gemini: '.gemini',
};
// TODO add .inference once all the providers support unified completion
const actionTypes = ['.bedrock', '.gen-ai', '.gemini'];

const sortedData = queryResult
.filter((p) =>
[actionTypeKey.bedrock, actionTypeKey.openai, actionTypeKey.gemini].includes(p.id)
)
return queryResult
.filter((p) => actionTypes.includes(p.id))
.sort((a, b) => a.name.localeCompare(b.name));
return sortedData;
},
{
retry: false,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@
import { waitFor, renderHook } from '@testing-library/react';
import { useLoadConnectors, Props } from '.';
import { mockConnectors } from '../../mock/connectors';
import { TestProviders } from '../../mock/test_providers/test_providers';
import React, { ReactNode } from 'react';

const mockConnectorsAndExtras = [
...mockConnectors,
Expand Down Expand Up @@ -45,50 +47,73 @@ const loadConnectorsResult = mockConnectors.map((c) => ({
isSystemAction: false,
}));

jest.mock('@tanstack/react-query', () => ({
useQuery: jest.fn().mockImplementation(async (queryKey, fn, opts) => {
try {
const res = await fn();
return Promise.resolve(res);
} catch (e) {
opts.onError(e);
}
}),
}));

const http = {
get: jest.fn().mockResolvedValue(connectorsApiResponse),
};
const toasts = {
addError: jest.fn(),
};
const defaultProps = { http, toasts } as unknown as Props;

/**
 * Builds a renderHook wrapper that renders children inside TestProviders,
 * forwarding the given `inferenceEnabled` flag through the provider context.
 * Defaults to `false` to mirror the production default.
 *
 * Naming the inner component (instead of returning an anonymous arrow
 * function) gives it a display name, so the react/display-name lint
 * suppression is no longer needed.
 */
const createWrapper = (inferenceEnabled = false) => {
  const Wrapper = ({ children }: { children: ReactNode }) => (
    <TestProviders providerContext={{ inferenceEnabled }}>{children}</TestProviders>
  );
  return Wrapper;
};

describe('useLoadConnectors', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('should call api to load action types', async () => {
renderHook(() => useLoadConnectors(defaultProps));
renderHook(() => useLoadConnectors(defaultProps), {
wrapper: TestProviders,
});
await waitFor(() => {
expect(defaultProps.http.get).toHaveBeenCalledWith('/api/actions/connectors');
expect(toasts.addError).not.toHaveBeenCalled();
});
});

it('should return sorted action types, removing isMissingSecrets and wrong action type ids', async () => {
const { result } = renderHook(() => useLoadConnectors(defaultProps));
it('should return sorted action types, removing isMissingSecrets and wrong action type ids, excluding .inference results', async () => {
const { result } = renderHook(() => useLoadConnectors(defaultProps), {
wrapper: TestProviders,
});
await waitFor(() => {
expect(result.current.data).toStrictEqual(
loadConnectorsResult
.filter((c) => c.actionTypeId !== '.inference')
// @ts-ignore ts does not like config, but we define it in the mock data
.map((c) => ({ ...c, apiProvider: c.config.apiProvider }))
);
});
});

it('includes preconfigured .inference results when inferenceEnabled is true', async () => {
const { result } = renderHook(() => useLoadConnectors(defaultProps), {
wrapper: createWrapper(true),
});
await waitFor(() => {
expect(result.current).resolves.toStrictEqual(
// @ts-ignore ts does not like config, but we define it in the mock data
loadConnectorsResult.map((c) => ({ ...c, apiProvider: c.config.apiProvider }))
expect(result.current.data).toStrictEqual(
mockConnectors
.filter(
(c) =>
c.actionTypeId !== '.inference' ||
(c.actionTypeId === '.inference' && c.isPreconfigured)
)
// @ts-ignore ts does not like config, but we define it in the mock data
.map((c) => ({ ...c, referencedByCount: 0, apiProvider: c?.config?.apiProvider }))
);
});
});
it('should display error toast when api throws error', async () => {
const mockHttp = {
get: jest.fn().mockRejectedValue(new Error('this is an error')),
} as unknown as Props['http'];
renderHook(() => useLoadConnectors({ ...defaultProps, http: mockHttp }));
renderHook(() => useLoadConnectors({ ...defaultProps, http: mockHttp }), {
wrapper: TestProviders,
});
await waitFor(() => expect(toasts.addError).toHaveBeenCalled());
});
});
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import type { IHttpFetchError } from '@kbn/core-http-browser';
import { HttpSetup } from '@kbn/core-http-browser';
import { IToasts } from '@kbn/core-notifications-browser';
import { OpenAiProviderType } from '@kbn/stack-connectors-plugin/common/openai/constants';
import { useAssistantContext } from '../../assistant_context';
import { AIConnector } from '../connector_selector';
import * as i18n from '../translations';

Expand All @@ -27,16 +28,17 @@ export interface Props {
toasts?: IToasts;
}

const actionTypeKey = {
bedrock: '.bedrock',
openai: '.gen-ai',
gemini: '.gemini',
};
const actionTypes = ['.bedrock', '.gen-ai', '.gemini'];

export const useLoadConnectors = ({
http,
toasts,
}: Props): UseQueryResult<AIConnector[], IHttpFetchError> => {
const { inferenceEnabled } = useAssistantContext();
if (inferenceEnabled) {
actionTypes.push('.inference');
}

return useQuery(
QUERY_KEY,
async () => {
Expand All @@ -45,9 +47,9 @@ export const useLoadConnectors = ({
(acc: AIConnector[], connector) => [
...acc,
...(!connector.isMissingSecrets &&
[actionTypeKey.bedrock, actionTypeKey.openai, actionTypeKey.gemini].includes(
connector.actionTypeId
)
actionTypes.includes(connector.actionTypeId) &&
// only include preconfigured .inference connectors
(connector.actionTypeId !== '.inference' || connector.isPreconfigured)
? [
{
...connector,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,4 +71,26 @@ export const mockConnectors: AIConnector[] = [
apiProvider: 'OpenAI',
},
},
{
id: 'c29c28a0-20fe-11ee-9386-a1f4d42ec542',
name: 'Regular Inference Connector',
isMissingSecrets: false,
actionTypeId: '.inference',
secrets: {},
isPreconfigured: false,
isDeprecated: false,
isSystemAction: false,
config: {
apiProvider: 'OpenAI',
},
},
{
id: 'c29c28a0-20fe-11ee-9396-a1f4d42ec542',
name: 'Preconfigured Inference Connector',
isMissingSecrets: false,
actionTypeId: '.inference',
isPreconfigured: true,
isDeprecated: false,
isSystemAction: false,
},
];
Loading