Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ dependsOn:
- '@kbn/kibana-react-plugin'
- '@kbn/cloud-plugin'
- '@kbn/deeplinks-management'
- '@kbn/connector-schemas'
tags:
- shared-browser
- package
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ import { httpServiceMock } from '@kbn/core-http-browser-mocks';
import { notificationServiceMock } from '@kbn/core-notifications-browser-mocks';
import { mockProviders } from '../utils/mock_providers';
import type { InferenceProvider } from '../types/types';
import { INTERNAL_OVERRIDE_FIELDS } from '../constants';

// Create a stable cloned copy for each test suite to prevent mutations from affecting other tests
// Note: Variable must be prefixed with 'mock' to be allowed in jest.mock()
Expand Down Expand Up @@ -122,6 +123,19 @@ describe('Inference Services', () => {
);
});

it('populates default model_id when selecting openai provider', async () => {
renderForm();

await userEvent.click(screen.getByTestId('provider-select'));
await userEvent.click(screen.getByText('OpenAI'));

expect(screen.getByTestId('provider-select')).toHaveValue('OpenAI');
const modelIdInput = screen.getByTestId('model_id-input');
// Default value comes from INTERNAL_OVERRIDE_FIELDS.openai.defaultValues.model_id
const expectedDefaultModel = INTERNAL_OVERRIDE_FIELDS.openai?.defaultValues?.model_id as string;
expect(modelIdInput).toHaveValue(expectedDefaultModel);
});

describe('isProviderForSolutions', () => {
it('should return true for provider with supported filter type', () => {
const provider = { service: 'amazonbedrock', name: 'Amazon Bedrock' } as InferenceProvider;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -235,13 +235,11 @@ export const InferenceServiceFormFields: React.FC<InferenceServicesProps> = ({
const newProvider = updatedProviders?.find(
(p) => p.service === (config.provider === '' ? providerSelected : config.provider)
);
const overrides = newProvider ? getOverrides(newProvider) : undefined;
const newProviderSchema: ConfigEntryView[] = newProvider
? mapProviderFields(taskType, newProvider, overrides)
: [];
if (newProvider) {
const overrides = getOverrides(newProvider);
const newProviderSchema: ConfigEntryView[] = mapProviderFields(
taskType,
newProvider,
overrides
);
setProviderSchema(newProviderSchema);
}

Expand All @@ -266,7 +264,9 @@ export const InferenceServiceFormFields: React.FC<InferenceServicesProps> = ({
newProvider?.configurations[k]?.supported_task_types &&
newProvider?.configurations[k].supported_task_types.includes(taskType)
) {
newConfig[k] = newProvider?.configurations[k]?.default_value ?? null;
// Get default value from schema (which includes overridden defaults from INTERNAL_OVERRIDE_FIELDS)
const schemaField = newProviderSchema.find((f) => f.key === k);
newConfig[k] = schemaField?.default_value ?? null;
}
});

Expand Down Expand Up @@ -318,11 +318,8 @@ export const InferenceServiceFormFields: React.FC<InferenceServicesProps> = ({

newProviderSchema.forEach((fieldConfig) => {
if (!fieldConfig.sensitive) {
if (fieldConfig && !!fieldConfig.default_value) {
defaultProviderConfig[fieldConfig.key] = fieldConfig.default_value;
} else {
defaultProviderConfig[fieldConfig.key] = null;
}
// default_value now includes overridden defaults from INTERNAL_OVERRIDE_FIELDS
defaultProviderConfig[fieldConfig.key] = fieldConfig.default_value;
} else {
defaultProviderSecrets[fieldConfig.key] = null;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,12 @@

import React from 'react';
import { EuiLink } from '@elastic/eui';
import { DEFAULT_MODEL as BEDROCK_DEFAULT_MODEL } from '@kbn/connector-schemas/bedrock/constants';
import { DEFAULT_MODEL as GEMINI_DEFAULT_MODEL } from '@kbn/connector-schemas/gemini/constants';
import { DEFAULT_MODEL as OPENAI_DEFAULT_MODEL } from '@kbn/connector-schemas/openai/constants';
import { GEMINI, DOCUMENTATION_BASE as DOCUMENTATION } from './translations';
import { FieldType, type InternalOverrideFieldsType } from './types/types';
import type { InternalOverrideFieldsType } from './types/types';
import { FieldType } from './types/types';

export enum ServiceProviderKeys {
'alibabacloud-ai-search' = 'alibabacloud-ai-search',
Expand Down Expand Up @@ -76,6 +80,7 @@ export const MAX_NUMBER_OF_ALLOCATIONS = 'max_number_of_allocations';
export const CONTEXT_WINDOW_LENGTH = 'contextWindowLength';

// This is a temporary solution to handle the internal overrides for field configurations that have not been updated in the services endpoint
// defaultValues can be used to set default values for model_id fields for providers
export const INTERNAL_OVERRIDE_FIELDS: InternalOverrideFieldsType = {
[ServiceProviderKeys.elasticsearch]: {
hidden: ['num_allocations', 'num_threads'],
Expand All @@ -96,4 +101,17 @@ export const INTERNAL_OVERRIDE_FIELDS: InternalOverrideFieldsType = {
],
serverlessOnly: true,
},
// Default model values for providers
[ServiceProviderKeys.openai]: {
defaultValues: { model_id: OPENAI_DEFAULT_MODEL },
},
[ServiceProviderKeys.amazonbedrock]: {
defaultValues: { model: BEDROCK_DEFAULT_MODEL },
},
[ServiceProviderKeys.googlevertexai]: {
defaultValues: { model_id: GEMINI_DEFAULT_MODEL },
},
[ServiceProviderKeys.googleaistudio]: {
defaultValues: { model_id: GEMINI_DEFAULT_MODEL },
},
};
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,8 @@ export interface OverrideFieldsContentType {
serverlessOnly?: boolean;
hidden?: string[];
additional?: FieldsConfiguration[];
/** Default values to apply to existing provider configuration fields (e.g., model_id default values) */
defaultValues?: Record<string, string | number | boolean | null>;
}
export type InternalOverrideFieldsType = {
[Key in ServiceProviderKeysType | string]?: OverrideFieldsContentType;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -102,20 +102,25 @@ export const mapProviderFields = (
(newProvider.configurations[pk].supported_task_types ?? [taskType]).includes(taskType) &&
(fieldOverrides?.hidden ?? []).indexOf(pk) === -1
)
.map(
(k): ConfigEntryView => ({
.map((k): ConfigEntryView => {
// Use override defaultValues if provider config doesn't have a default_value set
const configDefaultValue = newProvider.configurations[k].default_value;
const overrideDefaultValue = fieldOverrides?.defaultValues?.[k];
const resolvedDefaultValue = configDefaultValue ?? overrideDefaultValue ?? null;

return {
key: k,
isValid: true,
validationErrors: [],
value: newProvider.configurations[k].default_value ?? null,
default_value: newProvider.configurations[k].default_value ?? null,
value: resolvedDefaultValue,
default_value: resolvedDefaultValue,
description: newProvider.configurations[k].description ?? null,
label: newProvider.configurations[k].label ?? '',
required: newProvider.configurations[k].required ?? false,
sensitive: newProvider.configurations[k].sensitive ?? false,
updatable: newProvider.configurations[k].updatable ?? false,
type: newProvider.configurations[k].type ?? FieldType.STRING,
supported_task_types: newProvider.configurations[k].supported_task_types ?? [],
})
);
};
});
};
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,43 @@ import type { InferenceProvider } from '../types/types';
import { FieldType } from '../types/types';

export const mockProviders: InferenceProvider[] = [
{
service: 'openai',
name: 'OpenAI',
task_types: ['completion', 'text_embedding'],
configurations: {
api_key: {
default_value: null,
description: `API Key for the provider you're connecting to.`,
label: 'API Key',
required: true,
sensitive: true,
updatable: true,
type: FieldType.STRING,
supported_task_types: ['completion', 'text_embedding'],
},
model_id: {
default_value: null,
description: 'The name of the model to use for the inference task.',
label: 'Model ID',
required: true,
sensitive: false,
updatable: true,
type: FieldType.STRING,
supported_task_types: ['completion', 'text_embedding'],
},
'rate_limit.requests_per_minute': {
default_value: null,
description: 'Minimize the number of rate limit errors.',
label: 'Rate Limit',
required: false,
sensitive: false,
updatable: true,
type: FieldType.INTEGER,
supported_task_types: ['completion', 'text_embedding'],
},
},
},
{
service: 'hugging_face',
name: 'Hugging Face',
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
"@kbn/search-api-panels",
"@kbn/kibana-react-plugin",
"@kbn/cloud-plugin",
"@kbn/deeplinks-management"
"@kbn/deeplinks-management",
"@kbn/connector-schemas"
]
}
Loading