diff --git a/server/adaptors/integrations/__test__/builder.test.ts b/server/adaptors/integrations/__test__/builder.test.ts index 33aff3497f..e7819bb086 100644 --- a/server/adaptors/integrations/__test__/builder.test.ts +++ b/server/adaptors/integrations/__test__/builder.test.ts @@ -5,7 +5,13 @@ import { SavedObjectsClientContract } from '../../../../../../src/core/server'; import { IntegrationInstanceBuilder } from '../integrations_builder'; -import { IntegrationReader } from '../repository/integration'; +import { IntegrationReader } from '../repository/integration_reader'; +import * as mockUtils from '../repository/utils'; + +jest.mock('../repository/utils', () => ({ + ...jest.requireActual('../repository/utils'), + deepCheck: jest.fn(), +})); const mockSavedObjectsClient: SavedObjectsClientContract = ({ bulkCreate: jest.fn(), @@ -17,7 +23,6 @@ const mockSavedObjectsClient: SavedObjectsClientContract = ({ } as unknown) as SavedObjectsClientContract; const sampleIntegration: IntegrationReader = ({ - deepCheck: jest.fn().mockResolvedValue(true), getAssets: jest.fn().mockResolvedValue({ savedObjects: [ { @@ -104,8 +109,12 @@ describe('IntegrationInstanceBuilder', () => { }, }; + jest + .spyOn(mockUtils, 'deepCheck') + .mockResolvedValue({ ok: true, value: mockTemplate as IntegrationConfig }); + // Mock the implementation of the methods in the Integration class - sampleIntegration.deepCheck = jest.fn().mockResolvedValue({ ok: true, value: mockTemplate }); + // sampleIntegration.deepCheck = jest.fn().mockResolvedValue({ ok: true, value: mockTemplate }); sampleIntegration.getAssets = jest .fn() .mockResolvedValue({ ok: true, value: { savedObjects: remappedAssets } }); @@ -119,7 +128,6 @@ describe('IntegrationInstanceBuilder', () => { const instance = await builder.build(sampleIntegration, options); - expect(sampleIntegration.deepCheck).toHaveBeenCalled(); expect(sampleIntegration.getAssets).toHaveBeenCalled(); expect(remapIDsSpy).toHaveBeenCalledWith(remappedAssets); 
expect(postAssetsSpy).toHaveBeenCalledWith(remappedAssets); @@ -131,8 +139,8 @@ describe('IntegrationInstanceBuilder', () => { dataSource: 'instance-datasource', name: 'instance-name', }; - sampleIntegration.deepCheck = jest - .fn() + jest + .spyOn(mockUtils, 'deepCheck') .mockResolvedValue({ ok: false, error: new Error('Mock error') }); await expect(builder.build(sampleIntegration, options)).rejects.toThrowError('Mock error'); @@ -145,7 +153,9 @@ describe('IntegrationInstanceBuilder', () => { }; const errorMessage = 'Failed to get assets'; - sampleIntegration.deepCheck = jest.fn().mockResolvedValue({ ok: true, value: {} }); + jest + .spyOn(mockUtils, 'deepCheck') + .mockResolvedValue({ ok: true, value: ({} as unknown) as IntegrationConfig }); sampleIntegration.getAssets = jest .fn() .mockResolvedValue({ ok: false, error: new Error(errorMessage) }); @@ -165,7 +175,9 @@ describe('IntegrationInstanceBuilder', () => { }, ]; const errorMessage = 'Failed to post assets'; - sampleIntegration.deepCheck = jest.fn().mockResolvedValue({ ok: true, value: {} }); + jest + .spyOn(mockUtils, 'deepCheck') + .mockResolvedValue({ ok: true, value: ({} as unknown) as IntegrationConfig }); sampleIntegration.getAssets = jest .fn() .mockResolvedValue({ ok: true, value: { savedObjects: remappedAssets } }); @@ -180,10 +192,14 @@ describe('IntegrationInstanceBuilder', () => { const assets = [ { id: 'asset1', + type: 'unknown', + attributes: { title: 'asset1' }, references: [{ id: 'ref1' }, { id: 'ref2' }], }, { id: 'asset2', + type: 'unknown', + attributes: { title: 'asset1' }, references: [{ id: 'ref1' }, { id: 'ref3' }], }, ]; @@ -200,7 +216,7 @@ describe('IntegrationInstanceBuilder', () => { const remappedAssets = builder.remapIDs(assets); - expect(remappedAssets).toEqual(expectedRemappedAssets); + expect(remappedAssets).toMatchObject(expectedRemappedAssets); }); }); diff --git a/server/adaptors/integrations/__test__/json_repository.test.ts 
b/server/adaptors/integrations/__test__/json_repository.test.ts new file mode 100644 index 0000000000..18872913ce --- /dev/null +++ b/server/adaptors/integrations/__test__/json_repository.test.ts @@ -0,0 +1,225 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +/** + * Serialization tests for integrations in the local repository. + */ + +import { TemplateManager } from '../repository/repository'; +import { IntegrationReader } from '../repository/integration_reader'; +import path from 'path'; +import * as fs from 'fs/promises'; +import { JsonCatalogDataAdaptor } from '../repository/json_data_adaptor'; +import { deepCheck, foldResults } from '../repository/utils'; + +const fetchSerializedIntegrations = async (): Promise> => { + const directory = path.join(__dirname, '../__data__/repository'); + const folders = await fs.readdir(directory); + const readers = await Promise.all( + folders.map(async (folder) => { + const integPath = path.join(directory, folder); + if (!(await fs.lstat(integPath)).isDirectory()) { + // If it's not a directory (e.g. 
a README), skip it + return Promise.resolve(null); + } + // Otherwise, all directories must be integrations + return new IntegrationReader(integPath); + }) + ); + const serializedIntegrationResults = await Promise.all( + (readers.filter((x) => x !== null) as IntegrationReader[]).map((r) => r.serialize()) + ); + return foldResults(serializedIntegrationResults); +}; + +describe('The Local Serialized Catalog', () => { + it('Should serialize without errors', async () => { + const serialized = await fetchSerializedIntegrations(); + expect(serialized.ok).toBe(true); + }); + + it('Should pass deep validation for all serialized integrations', async () => { + const serialized = await fetchSerializedIntegrations(); + const repository = new TemplateManager( + '.', + new JsonCatalogDataAdaptor(serialized.value as SerializedIntegration[]) + ); + + for (const integ of await repository.getIntegrationList()) { + const validationResult = await deepCheck(integ); + await expect(validationResult).toHaveProperty('ok', true); + } + }); + + it('Should correctly retrieve a logo', async () => { + const serialized = await fetchSerializedIntegrations(); + const repository = new TemplateManager( + '.', + new JsonCatalogDataAdaptor(serialized.value as SerializedIntegration[]) + ); + const integration = (await repository.getIntegration('nginx')) as IntegrationReader; + const logoStatic = await integration.getStatic('logo.svg'); + + expect(logoStatic).toHaveProperty('ok', true); + expect((logoStatic.value as Buffer).length).toBeGreaterThan(1000); + }); + + it('Should correctly retrieve a gallery image', async () => { + const serialized = await fetchSerializedIntegrations(); + const repository = new TemplateManager( + '.', + new JsonCatalogDataAdaptor(serialized.value as SerializedIntegration[]) + ); + const integration = (await repository.getIntegration('nginx')) as IntegrationReader; + const logoStatic = await integration.getStatic('dashboard1.png'); + + expect(logoStatic).toHaveProperty('ok', 
true); + expect((logoStatic.value as Buffer).length).toBeGreaterThan(1000); + }); + + it('Should correctly retrieve a dark mode logo', async () => { + const TEST_INTEGRATION = 'nginx'; + const serialized = await fetchSerializedIntegrations(); + const config = (serialized.value as SerializedIntegration[]).filter( + (integ: { name: string; components: unknown[] }) => integ.name === TEST_INTEGRATION + )[0]; + + if (!config.statics) { + throw new Error('NginX integration missing statics (invalid test)'); + } + config.statics.darkModeGallery = config.statics.gallery; + config.statics.darkModeLogo = { + ...(config.statics.logo as SerializedStaticAsset), + path: 'dark_logo.svg', + }; + + const reader = new IntegrationReader('nginx', new JsonCatalogDataAdaptor([config])); + + await expect(reader.getStatic('dark_logo.svg')).resolves.toHaveProperty('ok', true); + }); + + it('Should correctly re-serialize', async () => { + const TEST_INTEGRATION = 'nginx'; + const serialized = await fetchSerializedIntegrations(); + const config = (serialized.value as SerializedIntegration[]).filter( + (integ: { name: string }) => integ.name === TEST_INTEGRATION + )[0]; + + const reader = new IntegrationReader('nginx', new JsonCatalogDataAdaptor([config])); + const reserialized = await reader.serialize(); + + expect(reserialized.value).toEqual(config); + }); + + it('Should correctly re-serialize with dark mode values', async () => { + const TEST_INTEGRATION = 'nginx'; + const serialized = await fetchSerializedIntegrations(); + const config = (serialized.value as SerializedIntegration[]).filter( + (integ: { name: string }) => integ.name === TEST_INTEGRATION + )[0]; + + if (!config.statics) { + throw new Error('NginX integration missing statics (invalid test)'); + } + config.statics.darkModeGallery = config.statics.gallery; + config.statics.darkModeLogo = { + ...(config.statics.logo as SerializedStaticAsset), + path: 'dark_logo.svg', + }; + + const reader = new IntegrationReader('nginx', new 
JsonCatalogDataAdaptor([config])); + const reserialized = await reader.serialize(); + + expect(reserialized.value).toEqual(config); + }); +}); + +describe('Integration validation', () => { + it('Should correctly fail an integration without schemas', async () => { + const TEST_INTEGRATION = 'nginx'; + const serialized = await fetchSerializedIntegrations(); + const transformedSerialized = (serialized.value as SerializedIntegration[]) + .filter((integ: { name: string; components: unknown[] }) => integ.name === TEST_INTEGRATION) + .map((integ) => { + return { + ...integ, + components: [] as SerializedIntegrationComponent[], + }; + }); + const integration = new IntegrationReader( + TEST_INTEGRATION, + new JsonCatalogDataAdaptor(transformedSerialized) + ); + + await expect(deepCheck(integration)).resolves.toHaveProperty('ok', false); + }); + + it('Should correctly fail an integration without assets', async () => { + const TEST_INTEGRATION = 'nginx'; + const serialized = await fetchSerializedIntegrations(); + const transformedSerialized = (serialized.value as SerializedIntegration[]) + .filter((integ: { name: string; components: unknown[] }) => integ.name === TEST_INTEGRATION) + .map((integ) => { + return { + ...integ, + assets: {} as SerializedIntegrationAssets, + }; + }); + const integration = new IntegrationReader( + TEST_INTEGRATION, + new JsonCatalogDataAdaptor(transformedSerialized) + ); + + await expect(deepCheck(integration)).resolves.toHaveProperty('ok', false); + }); +}); + +describe('JSON Catalog with invalid data', () => { + it('Should report an error if images are missing data', async () => { + const TEST_INTEGRATION = 'nginx'; + const serialized = await fetchSerializedIntegrations(); + const baseConfig = (serialized.value as SerializedIntegration[]).filter( + (integ: { name: string; components: unknown[] }) => integ.name === TEST_INTEGRATION + )[0]; + + if (!baseConfig.statics) { + throw new Error('NginX integration missing statics (invalid test)'); + } + + 
baseConfig.statics = { + logo: { path: 'logo.svg' } as SerializedStaticAsset, + darkModeLogo: { path: 'dm_logo.svg' } as SerializedStaticAsset, + gallery: [{ path: '1.png' }] as SerializedStaticAsset[], + darkModeGallery: [{ path: 'dm_1.png' }] as SerializedStaticAsset[], + }; + const reader = new IntegrationReader( + TEST_INTEGRATION, + new JsonCatalogDataAdaptor([baseConfig]) + ); + + await expect(reader.getStatic('logo.svg')).resolves.toHaveProperty('ok', false); + await expect(reader.getStatic('dm_logo.svg')).resolves.toHaveProperty('ok', false); + await expect(reader.getStatic('1.png')).resolves.toHaveProperty('ok', false); + await expect(reader.getStatic('dm_1.png')).resolves.toHaveProperty('ok', false); + }); + + it('Should report an error on read if a schema has invalid JSON', async () => { + const TEST_INTEGRATION = 'nginx'; + const serialized = await fetchSerializedIntegrations(); + const baseConfig = (serialized.value as SerializedIntegration[]).filter( + (integ: { name: string; components: unknown[] }) => integ.name === TEST_INTEGRATION + )[0]; + + expect(baseConfig.components.length).toBeGreaterThanOrEqual(2); + baseConfig.components[1].data = '{"invalid_json": true'; + + const reader = new IntegrationReader( + TEST_INTEGRATION, + new JsonCatalogDataAdaptor([baseConfig]) + ); + + await expect(reader.getSchemas()).resolves.toHaveProperty('ok', false); + }); +}); diff --git a/server/adaptors/integrations/__test__/local_repository.test.ts b/server/adaptors/integrations/__test__/local_fs_repository.test.ts similarity index 52% rename from server/adaptors/integrations/__test__/local_repository.test.ts rename to server/adaptors/integrations/__test__/local_fs_repository.test.ts index 622547f116..dcacf02bbb 100644 --- a/server/adaptors/integrations/__test__/local_repository.test.ts +++ b/server/adaptors/integrations/__test__/local_fs_repository.test.ts @@ -3,10 +3,15 @@ * SPDX-License-Identifier: Apache-2.0 */ +/** + * This file is used as integration tests 
for Integrations Repository functionality. + */ + import { TemplateManager } from '../repository/repository'; -import { IntegrationReader } from '../repository/integration'; +import { IntegrationReader } from '../repository/integration_reader'; import path from 'path'; import * as fs from 'fs/promises'; +import { deepCheck } from '../repository/utils'; describe('The local repository', () => { it('Should only contain valid integration directories or files.', async () => { @@ -21,7 +26,7 @@ describe('The local repository', () => { } // Otherwise, all directories must be integrations const integ = new IntegrationReader(integPath); - expect(integ.getConfig()).resolves.toHaveProperty('ok', true); + await expect(integ.getConfig()).resolves.toHaveProperty('ok', true); }) ); }); @@ -33,7 +38,7 @@ describe('The local repository', () => { const integrations: IntegrationReader[] = await repository.getIntegrationList(); await Promise.all( integrations.map(async (i) => { - const result = await i.deepCheck(); + const result = await deepCheck(i); if (!result.ok) { console.error(result.error); } @@ -42,3 +47,28 @@ describe('The local repository', () => { ); }); }); + +describe('Local Nginx Integration', () => { + it('Should serialize without errors', async () => { + const repository: TemplateManager = new TemplateManager( + path.join(__dirname, '../__data__/repository') + ); + const integration = await repository.getIntegration('nginx'); + + await expect(integration?.serialize()).resolves.toHaveProperty('ok', true); + }); + + it('Should serialize to include the config', async () => { + const repository: TemplateManager = new TemplateManager( + path.join(__dirname, '../__data__/repository') + ); + const integration = await repository.getIntegration('nginx'); + const config = await integration!.getConfig(); + const serialized = await integration!.serialize(); + + expect(serialized).toHaveProperty('ok', true); + expect((serialized as { value: object }).value).toMatchObject( + (config 
as { value: object }).value + ); + }); +}); diff --git a/server/adaptors/integrations/__test__/manager.test.ts b/server/adaptors/integrations/__test__/manager.test.ts index 3ee62470c4..417f566ede 100644 --- a/server/adaptors/integrations/__test__/manager.test.ts +++ b/server/adaptors/integrations/__test__/manager.test.ts @@ -7,7 +7,7 @@ import { IntegrationsManager } from '../integrations_manager'; import { SavedObject, SavedObjectsClientContract } from '../../../../../../src/core/server/types'; import { TemplateManager } from '../repository/repository'; import { IntegrationInstanceBuilder } from '../integrations_builder'; -import { IntegrationReader } from '../repository/integration'; +import { IntegrationReader } from '../repository/integration_reader'; import { SavedObjectsFindResponse } from '../../../../../../src/core/server'; describe('IntegrationsKibanaBackend', () => { diff --git a/server/adaptors/integrations/integrations_builder.ts b/server/adaptors/integrations/integrations_builder.ts index 7a8026ceac..6866873d94 100644 --- a/server/adaptors/integrations/integrations_builder.ts +++ b/server/adaptors/integrations/integrations_builder.ts @@ -4,16 +4,23 @@ */ import { v4 as uuidv4 } from 'uuid'; -import { uuidRx } from 'public/components/custom_panels/redux/panel_slice'; import { SavedObjectsClientContract } from '../../../../../src/core/server'; -import { IntegrationReader } from './repository/integration'; +import { IntegrationReader } from './repository/integration_reader'; import { SavedObjectsBulkCreateObject } from '../../../../../src/core/public'; +import { deepCheck } from './repository/utils'; interface BuilderOptions { name: string; dataSource: string; } +interface SavedObject { + id: string; + type: string; + attributes: { title: string }; + references: Array<{ id: string }>; +} + export class IntegrationInstanceBuilder { client: SavedObjectsClientContract; @@ -22,8 +29,7 @@ export class IntegrationInstanceBuilder { } build(integration: 
IntegrationReader, options: BuilderOptions): Promise { - const instance = integration - .deepCheck() + const instance = deepCheck(integration) .then((result) => { if (!result.ok) { return Promise.reject(result.error); @@ -36,14 +42,17 @@ export class IntegrationInstanceBuilder { } return assets.value; }) - .then((assets) => this.remapIDs(assets.savedObjects!)) + .then((assets) => this.remapIDs(assets.savedObjects! as SavedObject[])) .then((assets) => this.remapDataSource(assets, options.dataSource)) .then((assets) => this.postAssets(assets)) .then((refs) => this.buildInstance(integration, refs, options)); return instance; } - remapDataSource(assets: any[], dataSource: string | undefined): any[] { + remapDataSource( + assets: SavedObject[], + dataSource: string | undefined + ): Array<{ type: string; attributes: { title: string } }> { if (!dataSource) return assets; assets = assets.map((asset) => { if (asset.type === 'index-pattern') { @@ -54,7 +63,7 @@ export class IntegrationInstanceBuilder { return assets; } - remapIDs(assets: any[]): any[] { + remapIDs(assets: SavedObject[]): SavedObject[] { const toRemap = assets.filter((asset) => asset.id); const idMap = new Map(); return toRemap.map((item) => { @@ -73,20 +82,22 @@ export class IntegrationInstanceBuilder { }); } - async postAssets(assets: any[]): Promise { + async postAssets(assets: SavedObjectsBulkCreateObject[]): Promise { try { - const response = await this.client.bulkCreate(assets as SavedObjectsBulkCreateObject[]); - const refs: AssetReference[] = response.saved_objects.map((obj: any) => { - return { - assetType: obj.type, - assetId: obj.id, - status: 'available', // Assuming a successfully created object is available - isDefaultAsset: obj.type === 'dashboard', // Assuming for now that dashboards are default - description: obj.attributes?.title, - }; - }); + const response = await this.client.bulkCreate(assets); + const refs: AssetReference[] = (response.saved_objects as SavedObject[]).map( + (obj: 
SavedObject) => { + return { + assetType: obj.type, + assetId: obj.id, + status: 'available', // Assuming a successfully created object is available + isDefaultAsset: obj.type === 'dashboard', // Assuming for now that dashboards are default + description: obj.attributes?.title, + }; + } + ); return Promise.resolve(refs); - } catch (err: any) { + } catch (err) { return Promise.reject(err); } } diff --git a/server/adaptors/integrations/repository/__test__/integration.test.ts b/server/adaptors/integrations/repository/__test__/integration_reader.test.ts similarity index 64% rename from server/adaptors/integrations/repository/__test__/integration.test.ts rename to server/adaptors/integrations/repository/__test__/integration_reader.test.ts index ec77acac1a..a2677dd94f 100644 --- a/server/adaptors/integrations/repository/__test__/integration.test.ts +++ b/server/adaptors/integrations/repository/__test__/integration_reader.test.ts @@ -4,7 +4,7 @@ */ import * as fs from 'fs/promises'; -import { IntegrationReader } from '../integration'; +import { IntegrationReader } from '../integration_reader'; import { Dirent, Stats } from 'fs'; import * as path from 'path'; import { TEST_INTEGRATION_CONFIG } from '../../../../../test/constants'; @@ -57,7 +57,7 @@ describe('Integration', () => { const result = await integration.getConfig(); expect(spy).toHaveBeenCalled(); - expect(result.ok).toBe(false); + expect(result.error?.message).toContain('not a valid integration directory'); }); it('should return the parsed config template if it is valid', async () => { @@ -75,7 +75,7 @@ describe('Integration', () => { const result = await integration.getConfig(TEST_INTEGRATION_CONFIG.version); - expect(result.ok).toBe(false); + expect(result.error?.message).toBe('data/version must be string'); }); it('should return an error if the config file has syntax errors', async () => { @@ -83,7 +83,7 @@ describe('Integration', () => { const result = await 
integration.getConfig(TEST_INTEGRATION_CONFIG.version); - expect(result.ok).toBe(false); + expect(result.error?.message).toBe('Unable to parse file as JSON or NDJson'); }); it('should return an error if the integration config does not exist', async () => { @@ -91,104 +91,100 @@ describe('Integration', () => { const readFileMock = jest.spyOn(fs, 'readFile').mockImplementation((..._args) => { // Can't find any information on how to mock an actual file not found error, // But at least according to the current implementation this should be equivalent. - const error: any = new Error('ENOENT: File not found'); - error.code = 'ENOENT'; + const error: Error = new Error('ENOENT: File not found'); + (error as { code?: string }).code = 'ENOENT'; return Promise.reject(error); }); const result = await integration.getConfig(TEST_INTEGRATION_CONFIG.version); expect(readFileMock).toHaveBeenCalled(); - expect(result.ok).toBe(false); + expect(result.error?.message).toContain('File not found'); }); }); describe('getAssets', () => { it('should return linked saved object assets when available', async () => { - integration.getConfig = jest - .fn() - .mockResolvedValue({ ok: true, value: TEST_INTEGRATION_CONFIG }); - jest.spyOn(fs, 'readFile').mockResolvedValue('{"name":"asset1"}\n{"name":"asset2"}'); + jest + .spyOn(fs, 'readFile') + .mockResolvedValueOnce(JSON.stringify(TEST_INTEGRATION_CONFIG)) + .mockResolvedValue('{"name":"asset1"}\n{"name":"asset2"}'); const result = await integration.getAssets(TEST_INTEGRATION_CONFIG.version); expect(result.ok).toBe(true); - expect((result as any).value.savedObjects).toStrictEqual([ + expect((result as { value: { savedObjects: unknown } }).value.savedObjects).toEqual([ { name: 'asset1' }, { name: 'asset2' }, ]); }); it('should return an error if the provided version has no config', async () => { - integration.getConfig = jest.fn().mockResolvedValue({ ok: false, error: new Error() }); + jest.spyOn(fs, 'readFile').mockRejectedValueOnce(new 
Error('ENOENT: File not found')); + const result = await integration.getAssets(); - expect(integration.getAssets()).resolves.toHaveProperty('ok', false); + expect(result.error?.message).toContain('File not found'); }); it('should return an error if the saved object assets are invalid', async () => { - integration.getConfig = jest - .fn() - .mockResolvedValue({ ok: true, value: TEST_INTEGRATION_CONFIG }); - jest.spyOn(fs, 'readFile').mockResolvedValue('{"unclosed":'); + jest + .spyOn(fs, 'readFile') + .mockResolvedValueOnce(JSON.stringify(TEST_INTEGRATION_CONFIG)) + .mockResolvedValue('{"unclosed":'); const result = await integration.getAssets(TEST_INTEGRATION_CONFIG.version); - expect(result.ok).toBe(false); + expect(result.error?.message).toBe('Unable to parse file as JSON or NDJson'); }); }); describe('getSchemas', () => { it('should retrieve mappings and schemas for all components in the config', async () => { const sampleConfig = { + ...TEST_INTEGRATION_CONFIG, components: [ - { name: 'component1', version: '1.0.0' }, + { name: 'logs', version: '1.0.0' }, { name: 'component2', version: '2.0.0' }, ], }; - integration.getConfig = jest.fn().mockResolvedValue({ ok: true, value: sampleConfig }); - - const mappingFile1 = 'component1-1.0.0.mapping.json'; - const mappingFile2 = 'component2-2.0.0.mapping.json'; jest .spyOn(fs, 'readFile') + .mockResolvedValueOnce(JSON.stringify(sampleConfig)) .mockResolvedValueOnce(JSON.stringify({ mapping: 'mapping1' })) .mockResolvedValueOnce(JSON.stringify({ mapping: 'mapping2' })); const result = await integration.getSchemas(); - expect(result.ok).toBe(true); - expect((result as any).value).toStrictEqual({ + expect(result).toMatchObject({ ok: true }); + expect((result as { value: unknown }).value).toStrictEqual({ mappings: { - component1: { mapping: 'mapping1' }, + logs: { mapping: 'mapping1' }, component2: { mapping: 'mapping2' }, }, }); - - expect(fs.readFile).toHaveBeenCalledWith( - path.join(integration.directory, 'schemas', 
mappingFile1), - { encoding: 'utf-8' } - ); - expect(fs.readFile).toHaveBeenCalledWith( - path.join(integration.directory, 'schemas', mappingFile2), - { encoding: 'utf-8' } - ); }); it('should reject with an error if the config is invalid', async () => { - integration.getConfig = jest.fn().mockResolvedValue({ ok: false, error: new Error() }); + jest.spyOn(fs, 'readFile').mockResolvedValueOnce( + JSON.stringify({ + ...TEST_INTEGRATION_CONFIG, + name: undefined, + }) + ); + const result = await integration.getSchemas(); - await expect(integration.getSchemas()).resolves.toHaveProperty('ok', false); + expect(result.error?.message).toBe("data must have required property 'name'"); }); it('should reject with an error if a mapping file is invalid', async () => { - const sampleConfig = { - components: [{ name: 'component1', version: '1.0.0' }], - }; - integration.getConfig = jest.fn().mockResolvedValue({ ok: true, value: sampleConfig }); - jest.spyOn(fs, 'readFile').mockRejectedValueOnce(new Error('Could not load schema')); + jest + .spyOn(fs, 'readFile') + .mockResolvedValueOnce(JSON.stringify(TEST_INTEGRATION_CONFIG)) + .mockRejectedValueOnce(new Error('Could not load schema')); - await expect(integration.getSchemas()).resolves.toHaveProperty('ok', false); + const result = await integration.getSchemas(); + expect(result.error?.message).toBe('Could not load schema'); }); }); @@ -201,52 +197,72 @@ describe('Integration', () => { const result = await integration.getStatic('logo.png'); expect(result.ok).toBe(true); - expect((result as any).value).toStrictEqual(Buffer.from('logo data', 'ascii')); + expect((result as { value: unknown }).value).toStrictEqual(Buffer.from('logo data', 'ascii')); expect(readFileMock).toBeCalledWith(path.join('sample', 'static', 'logo.png')); }); it('should return an error if the static file is not found', async () => { jest.spyOn(fs, 'readFile').mockImplementation((..._args) => { - const error: any = new Error('ENOENT: File not found'); - 
error.code = 'ENOENT'; + const error: Error = new Error('ENOENT: File not found'); + (error as { code?: string }).code = 'ENOENT'; return Promise.reject(error); }); - expect(integration.getStatic('/logo.png')).resolves.toHaveProperty('ok', false); + const result = await integration.getStatic('/logo.png'); + await expect(result.error?.message).toContain('File not found'); }); }); describe('getSampleData', () => { it('should return sample data', async () => { - const sampleConfig = { sampleData: { path: 'sample.json' } }; - integration.getConfig = jest.fn().mockResolvedValue({ ok: true, value: sampleConfig }); - const readFileMock = jest.spyOn(fs, 'readFile').mockResolvedValue('[{"sample": true}]'); + const readFileMock = jest + .spyOn(fs, 'readFile') + .mockResolvedValueOnce( + JSON.stringify({ + ...TEST_INTEGRATION_CONFIG, + sampleData: { + path: 'sample.json', + }, + }) + ) + .mockResolvedValue('[{"sample": true}]'); const result = await integration.getSampleData(); - expect(result.ok).toBe(true); - expect((result as any).value.sampleData).toStrictEqual([{ sample: true }]); + expect(result.value).toStrictEqual({ sampleData: [{ sample: true }] }); expect(readFileMock).toBeCalledWith(path.join('sample', 'data', 'sample.json'), { encoding: 'utf-8', }); }); it("should return null if there's no sample data", async () => { - integration.getConfig = jest.fn().mockResolvedValue({ ok: true, value: {} }); + jest.spyOn(fs, 'readFile').mockResolvedValueOnce( + JSON.stringify({ + ...TEST_INTEGRATION_CONFIG, + }) + ); const result = await integration.getSampleData(); expect(result.ok).toBe(true); - expect((result as any).value.sampleData).toBeNull(); + expect((result as { value: { sampleData: unknown } }).value.sampleData).toBeNull(); }); it('should catch and fail gracefully on invalid sample data', async () => { - const sampleConfig = { sampleData: { path: 'sample.json' } }; - integration.getConfig = jest.fn().mockResolvedValue({ ok: true, value: sampleConfig }); - 
jest.spyOn(fs, 'readFile').mockResolvedValue('[{"closingBracket": false]'); + jest + .spyOn(fs, 'readFile') + .mockResolvedValueOnce( + JSON.stringify({ + ...TEST_INTEGRATION_CONFIG, + sampleData: { + path: 'sample.json', + }, + }) + ) + .mockResolvedValue('[{"closingBracket": false]'); const result = await integration.getSampleData(); - expect(result.ok).toBe(false); + expect(result.error?.message).toBe('Unable to parse file as JSON or NDJson'); }); }); }); diff --git a/server/adaptors/integrations/repository/__test__/json_data_adaptor.test.ts b/server/adaptors/integrations/repository/__test__/json_data_adaptor.test.ts new file mode 100644 index 0000000000..8c703b7516 --- /dev/null +++ b/server/adaptors/integrations/repository/__test__/json_data_adaptor.test.ts @@ -0,0 +1,115 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +import { TemplateManager } from '../repository'; +import { IntegrationReader } from '../integration_reader'; +import path from 'path'; +import { JsonCatalogDataAdaptor } from '../json_data_adaptor'; +import { TEST_INTEGRATION_CONFIG } from '../../../../../test/constants'; + +// Simplified catalog for integration searching -- Do not use for full deserialization tests. 
+const TEST_CATALOG_NO_SERIALIZATION: SerializedIntegration[] = [ + { + ...(TEST_INTEGRATION_CONFIG as SerializedIntegration), + name: 'sample1', + }, + { + ...(TEST_INTEGRATION_CONFIG as SerializedIntegration), + name: 'sample2', + }, + { + ...(TEST_INTEGRATION_CONFIG as SerializedIntegration), + name: 'sample2', + version: '2.1.0', + }, +]; + +describe('JSON Data Adaptor', () => { + it('Should be able to deserialize a serialized integration', async () => { + const repository: TemplateManager = new TemplateManager( + path.join(__dirname, '../../__data__/repository') + ); + const fsIntegration: IntegrationReader = (await repository.getIntegration('nginx'))!; + const fsConfig = await fsIntegration.getConfig(); + const serialized = await fsIntegration.serialize(); + + expect(serialized.ok).toBe(true); + + const adaptor: JsonCatalogDataAdaptor = new JsonCatalogDataAdaptor([ + (serialized as { value: SerializedIntegration }).value, + ]); + const jsonIntegration = new IntegrationReader('nginx', adaptor); + + await expect(jsonIntegration.getConfig()).resolves.toMatchObject(fsConfig); + }); + + it('Should filter its list on join', async () => { + const adaptor = new JsonCatalogDataAdaptor(TEST_CATALOG_NO_SERIALIZATION); + const joined = await adaptor.join('sample1'); + expect(joined.integrationsList).toHaveLength(1); + }); + + it('Should correctly identify repository type', async () => { + const adaptor = new JsonCatalogDataAdaptor(TEST_CATALOG_NO_SERIALIZATION); + await expect(adaptor.getDirectoryType()).resolves.toBe('repository'); + }); + + it('Should correctly identify integration type after filtering', async () => { + const adaptor = new JsonCatalogDataAdaptor(TEST_CATALOG_NO_SERIALIZATION); + const joined = await adaptor.join('sample1'); + await expect(joined.getDirectoryType()).resolves.toBe('integration'); + }); + + it('Should correctly retrieve integration versions', async () => { + const adaptor = new JsonCatalogDataAdaptor(TEST_CATALOG_NO_SERIALIZATION); + 
const versions = await adaptor.findIntegrationVersions('sample2'); + expect((versions as { value: string[] }).value).toHaveLength(2); + }); + + it('Should correctly supply latest integration version for IntegrationReader', async () => { + const adaptor = new JsonCatalogDataAdaptor(TEST_CATALOG_NO_SERIALIZATION); + const reader = new IntegrationReader('sample2', adaptor.join('sample2')); + const version = await reader.getLatestVersion(); + expect(version).toBe('2.1.0'); + }); + + it('Should find integration names', async () => { + const adaptor = new JsonCatalogDataAdaptor(TEST_CATALOG_NO_SERIALIZATION); + const integResult = await adaptor.findIntegrations(); + const integs = (integResult as { value: string[] }).value; + integs.sort(); + + expect(integs).toEqual(['sample1', 'sample2']); + }); + + it('Should reject any attempts to read a file with a type', async () => { + const adaptor = new JsonCatalogDataAdaptor(TEST_CATALOG_NO_SERIALIZATION); + const result = await adaptor.readFile('logs-1.0.0.json', 'schemas'); + await expect(result.error?.message).toBe( + 'JSON adaptor does not support subtypes (isConfigLocalized: true)' + ); + }); + + it('Should reject any attempts to read a raw file', async () => { + const adaptor = new JsonCatalogDataAdaptor(TEST_CATALOG_NO_SERIALIZATION); + const result = await adaptor.readFileRaw('logo.svg', 'static'); + await expect(result.error?.message).toBe( + 'JSON adaptor does not support raw files (isConfigLocalized: true)' + ); + }); + + it('Should reject nested directory searching', async () => { + const adaptor = new JsonCatalogDataAdaptor(TEST_CATALOG_NO_SERIALIZATION); + const result = await adaptor.findIntegrations('sample1'); + await expect(result.error?.message).toBe( + 'Finding integrations for custom dirs not supported for JSONreader' + ); + }); + + it('Should report unknown directory type if integration list is empty', async () => { + const adaptor = new JsonCatalogDataAdaptor([]); + await 
expect(adaptor.getDirectoryType()).resolves.toBe('unknown'); + }); +}); diff --git a/server/adaptors/integrations/repository/__test__/repository.test.ts b/server/adaptors/integrations/repository/__test__/repository.test.ts index ea5c853c66..816b44eaa0 100644 --- a/server/adaptors/integrations/repository/__test__/repository.test.ts +++ b/server/adaptors/integrations/repository/__test__/repository.test.ts @@ -5,7 +5,7 @@ import * as fs from 'fs/promises'; import { TemplateManager } from '../repository'; -import { IntegrationReader } from '../integration'; +import { IntegrationReader } from '../integration_reader'; import { Dirent, Stats } from 'fs'; import path from 'path'; @@ -28,7 +28,7 @@ describe('Repository', () => { jest.spyOn(fs, 'lstat').mockResolvedValue({ isDirectory: () => true } as Stats); jest .spyOn(IntegrationReader.prototype, 'getConfig') - .mockResolvedValue({ ok: true, value: {} as any }); + .mockResolvedValue({ ok: true, value: ({} as unknown) as IntegrationConfig }); const integrations = await repository.getIntegrationList(); @@ -51,7 +51,7 @@ describe('Repository', () => { jest .spyOn(IntegrationReader.prototype, 'getConfig') - .mockResolvedValue({ ok: true, value: {} as any }); + .mockResolvedValue({ ok: true, value: ({} as unknown) as IntegrationConfig }); const integrations = await repository.getIntegrationList(); @@ -73,7 +73,7 @@ describe('Repository', () => { jest.spyOn(fs, 'lstat').mockResolvedValue({ isDirectory: () => true } as Stats); jest .spyOn(IntegrationReader.prototype, 'getConfig') - .mockResolvedValue({ ok: true, value: {} as any }); + .mockResolvedValue({ ok: true, value: ({} as unknown) as IntegrationConfig }); const integration = await repository.getIntegration('integrationName'); diff --git a/server/adaptors/integrations/repository/__test__/utils.test.ts b/server/adaptors/integrations/repository/__test__/utils.test.ts new file mode 100644 index 0000000000..d9acb440e8 --- /dev/null +++ 
b/server/adaptors/integrations/repository/__test__/utils.test.ts @@ -0,0 +1,35 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +import { foldResults } from '../utils'; + +describe('foldResults', () => { + it('should return an empty array result if input array is empty', () => { + const results: Array<Result<number>> = []; + const result = foldResults(results); + expect(result).toEqual({ ok: true, value: [] }); + }); + + it('should fold results into a single array if all input results are ok', () => { + const results: Array<Result<number>> = [ + { ok: true, value: 1 }, + { ok: true, value: 2 }, + { ok: true, value: 3 }, + ]; + const result = foldResults(results); + expect(result).toEqual({ ok: true, value: [1, 2, 3] }); + }); + + it('should return the first error result encountered if any results are not ok', () => { + const results: Array<Result<number>> = [ + { ok: true, value: 1 }, + { ok: false, error: new Error('Error 1') }, + { ok: true, value: 3 }, + { ok: false, error: new Error('Error 2') }, + ]; + const result = foldResults(results); + expect(result).toEqual({ ok: false, error: new Error('Error 1') }); + }); +}); diff --git a/server/adaptors/integrations/repository/catalog_data_adaptor.ts b/server/adaptors/integrations/repository/catalog_data_adaptor.ts index 6373fee4d4..8dd3c8c616 100644 --- a/server/adaptors/integrations/repository/catalog_data_adaptor.ts +++ b/server/adaptors/integrations/repository/catalog_data_adaptor.ts @@ -3,9 +3,16 @@ * SPDX-License-Identifier: Apache-2.0 */ -type IntegrationPart = 'assets' | 'data' | 'schemas' | 'static'; +// Matches the subdirectories of the File System serialization, used in the shipped catalog. +// Generally corresponds to each section of linked assets in the Integration Config format. +// This is helpful for asset location in non-localized config formats.
+export type IntegrationPart = 'assets' | 'data' | 'schemas' | 'static'; + +export interface CatalogDataAdaptor { + // Indicates whether the config for this type of adaptor should be localized. + // If true, the catalog supports reading serialized integrations. + isConfigLocalized: boolean; -interface CatalogDataAdaptor { /** * Reads a Json or NDJson file from the data source. * @@ -33,7 +40,7 @@ interface CatalogDataAdaptor { findIntegrations: (dirname?: string) => Promise>; /** - * Reads the contents of an integration version to find available versions. + * Reads the contents of an integration directory to find available versions. * * @param dirname The name of the directory to read. * @returns A Promise that resolves with an array of filenames within the directory. diff --git a/server/adaptors/integrations/repository/fs_data_adaptor.ts b/server/adaptors/integrations/repository/fs_data_adaptor.ts index d39a0de889..52c5dff6d5 100644 --- a/server/adaptors/integrations/repository/fs_data_adaptor.ts +++ b/server/adaptors/integrations/repository/fs_data_adaptor.ts @@ -5,48 +5,8 @@ import * as fs from 'fs/promises'; import path from 'path'; - -/** - * Helper function to compare version numbers. - * Assumes that the version numbers are valid, produces undefined behavior otherwise. - * - * @param a Left-hand number - * @param b Right-hand number - * @returns -1 if a > b, 1 if a < b, 0 otherwise. - */ -function compareVersions(a: string, b: string): number { - const aParts = a.split('.').map(Number.parseInt); - const bParts = b.split('.').map(Number.parseInt); - - for (let i = 0; i < Math.max(aParts.length, bParts.length); i++) { - const aValue = i < aParts.length ? aParts[i] : 0; - const bValue = i < bParts.length ? 
bParts[i] : 0; - - if (aValue > bValue) { - return -1; // a > b - } else if (aValue < bValue) { - return 1; // a < b - } - } - - return 0; // a == b -} - -function tryParseNDJson(content: string): object[] | null { - try { - const objects = []; - for (const line of content.split('\n')) { - if (line.trim() === '') { - // Other OSD ndjson parsers skip whitespace lines - continue; - } - objects.push(JSON.parse(line)); - } - return objects; - } catch (err: any) { - return null; - } -} +import { CatalogDataAdaptor, IntegrationPart } from './catalog_data_adaptor'; +import { tryParseNDJson } from './utils'; // Check if a location is a directory without an exception if location not found const safeIsDirectory = async (maybeDirectory: string): Promise => { @@ -59,9 +19,10 @@ const safeIsDirectory = async (maybeDirectory: string): Promise => { /** * A CatalogDataAdaptor that reads from the local filesystem. - * Used to read Integration information when the user uploads their own catalog. + * Used to read default Integrations shipped in the in-product catalog at `__data__`. */ export class FileSystemCatalogDataAdaptor implements CatalogDataAdaptor { + isConfigLocalized = false; directory: string; /** @@ -79,15 +40,15 @@ export class FileSystemCatalogDataAdaptor implements CatalogDataAdaptor { content = await fs.readFile(path.join(this.directory, type ?? '.', filename), { encoding: 'utf-8', }); - } catch (err: any) { + } catch (err) { return { ok: false, error: err }; } // First try to parse as JSON, then NDJSON, then fail. try { const parsed = JSON.parse(content); return { ok: true, value: parsed }; - } catch (err: any) { - const parsed = tryParseNDJson(content); + } catch (err) { + const parsed = await tryParseNDJson(content); if (parsed) { return { ok: true, value: parsed }; } @@ -102,7 +63,7 @@ export class FileSystemCatalogDataAdaptor implements CatalogDataAdaptor { try { const buffer = await fs.readFile(path.join(this.directory, type ?? 
'.', filename)); return { ok: true, value: buffer }; - } catch (err: any) { + } catch (err) { return { ok: false, error: err }; } } @@ -112,7 +73,7 @@ export class FileSystemCatalogDataAdaptor implements CatalogDataAdaptor { const integrations: string[] = []; await this.collectIntegrationsRecursive(dirname, integrations); return { ok: true, value: integrations }; - } catch (err: any) { + } catch (err) { return { ok: false, error: err }; } } @@ -140,7 +101,7 @@ export class FileSystemCatalogDataAdaptor implements CatalogDataAdaptor { const integPath = path.join(this.directory, dirname); try { files = await fs.readdir(integPath); - } catch (err: any) { + } catch (err) { return { ok: false, error: err }; } const versions: string[] = []; @@ -156,7 +117,6 @@ export class FileSystemCatalogDataAdaptor implements CatalogDataAdaptor { } } - versions.sort((a, b) => compareVersions(a, b)); return { ok: true, value: versions }; } diff --git a/server/adaptors/integrations/repository/integration.ts b/server/adaptors/integrations/repository/integration.ts deleted file mode 100644 index 83e5779aca..0000000000 --- a/server/adaptors/integrations/repository/integration.ts +++ /dev/null @@ -1,257 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -import path from 'path'; -import { validateTemplate } from '../validators'; -import { FileSystemCatalogDataAdaptor } from './fs_data_adaptor'; - -/** - * The Integration class represents the data for Integration Templates. - * It is backed by the repository file system. - * It includes accessor methods for integration configs, as well as helpers for nested components. - */ -export class IntegrationReader { - reader: CatalogDataAdaptor; - directory: string; - name: string; - - constructor(directory: string, reader?: CatalogDataAdaptor) { - this.directory = directory; - this.name = path.basename(directory); - this.reader = reader ?? 
new FileSystemCatalogDataAdaptor(directory); - } - - /** - * Like getConfig(), but thoroughly checks all nested integration dependencies for validity. - * - * @returns a Result indicating whether the integration is valid. - */ - async deepCheck(): Promise> { - const configResult = await this.getConfig(); - if (!configResult.ok) { - return configResult; - } - - try { - const schemas = await this.getSchemas(); - if (!schemas.ok || Object.keys(schemas.value.mappings).length === 0) { - return { ok: false, error: new Error('The integration has no schemas available') }; - } - const assets = await this.getAssets(); - if (!assets.ok || Object.keys(assets).length === 0) { - return { ok: false, error: new Error('An integration must have at least one asset') }; - } - } catch (err: any) { - return { ok: false, error: err }; - } - - return configResult; - } - - /** - * Get the latest version of the integration available. - * This method relies on the fact that integration configs have their versions in their name. - * Any files that don't match the config naming convention will be ignored. - * - * @returns A string with the latest version, or null if no versions are available. - */ - async getLatestVersion(): Promise { - const versions = await this.reader.findIntegrationVersions(); - if (!versions.ok) { - console.error(versions.error); - return null; - } - return versions.value.length > 0 ? versions.value[0] : null; - } - - /** - * Get the configuration of the current integration. - * - * @param version The version of the config to retrieve. - * @returns The config if a valid config matching the version is present, otherwise null. - */ - async getConfig(version?: string): Promise> { - if ((await this.reader.getDirectoryType()) !== 'integration') { - return { ok: false, error: new Error(`${this.directory} is not a valid integration`) }; - } - - const maybeVersion: string | null = version ? 
version : await this.getLatestVersion(); - - if (maybeVersion === null) { - return { - ok: false, - error: new Error(`No valid config matching version ${version} is available`), - }; - } - - const configFile = `${this.name}-${maybeVersion}.json`; - - const config = await this.reader.readFile(configFile); - if (!config.ok) { - return config; - } - return validateTemplate(config.value); - } - - /** - * Retrieve assets associated with the integration. - * This method greedily retrieves all assets. - * If the version is invalid, an error is thrown. - * If an asset is invalid, it will be skipped. - * - * @param version The version of the integration to retrieve assets for. - * @returns An object containing the different types of assets. - */ - async getAssets( - version?: string - ): Promise< - Result<{ - savedObjects?: object[]; - queries?: Array<{ - query: string; - language: string; - }>; - }> - > { - const configResult = await this.getConfig(version); - if (!configResult.ok) { - return configResult; - } - const config = configResult.value; - - const resultValue: { - savedObjects?: object[]; - queries?: Array<{ query: string; language: string }>; - } = {}; - if (config.assets.savedObjects) { - const sobjPath = `${config.assets.savedObjects.name}-${config.assets.savedObjects.version}.ndjson`; - const assets = await this.reader.readFile(sobjPath, 'assets'); - if (!assets.ok) { - return assets; - } - resultValue.savedObjects = assets.value as object[]; - } - if (config.assets.queries) { - resultValue.queries = []; - const queries = await Promise.all( - config.assets.queries.map(async (item) => { - const queryPath = `${item.name}-${item.version}.${item.language}`; - const query = await this.reader.readFileRaw(queryPath, 'assets'); - if (!query.ok) { - return query; - } - return { - ok: true as const, - value: { - language: item.language, - query: query.value.toString('utf8'), - }, - }; - }) - ); - for (const query of queries) { - if (!query.ok) { - return query; - } - 
resultValue.queries.push(query.value); - } - } - return { ok: true, value: resultValue }; - } - - /** - * Retrieve sample data associated with the integration. - * If the version is invalid, an error is thrown. - * If the sample data is invalid, null will be returned - * - * @param version The version of the integration to retrieve assets for. - * @returns An object containing a list of sample data with adjusted timestamps. - */ - async getSampleData( - version?: string - ): Promise< - Result<{ - sampleData: object[] | null; - }> - > { - const configResult = await this.getConfig(version); - if (!configResult.ok) { - return configResult; - } - const config = configResult.value; - - const resultValue: { sampleData: object[] | null } = { sampleData: null }; - if (config.sampleData) { - const jsonContent = await this.reader.readFile(config.sampleData.path, 'data'); - if (!jsonContent.ok) { - return jsonContent; - } - for (const value of jsonContent.value as object[]) { - if (!('@timestamp' in value)) { - continue; - } - // Randomly scatter timestamps across last 10 minutes - // Assume for now that the ordering of events isn't important, can change to a sequence if needed - // Also doesn't handle fields like `observedTimestamp` if present - const newTime = new Date( - Date.now() - Math.floor(Math.random() * 1000 * 60 * 10) - ).toISOString(); - Object.assign(value, { '@timestamp': newTime }); - if ('observedTimestamp' in value) { - Object.assign(value, { observedTimestamp: newTime }); - } - } - resultValue.sampleData = jsonContent.value as object[]; - } - return { ok: true, value: resultValue }; - } - - /** - * Retrieve schema data associated with the integration. - * This method greedily retrieves all mappings and schemas. - * It's assumed that a valid version will be provided. - * If the version is invalid, an error is thrown. - * If a schema is invalid, an error will be thrown. - * - * @param version The version of the integration to retrieve assets for. 
- * @returns An object containing the different types of assets. - */ - async getSchemas( - version?: string - ): Promise< - Result<{ - mappings: { [key: string]: any }; - }> - > { - const configResult = await this.getConfig(version); - if (!configResult.ok) { - return configResult; - } - const config = configResult.value; - - const resultValue: { mappings: { [key: string]: object } } = { - mappings: {}, - }; - for (const component of config.components) { - const schemaFile = `${component.name}-${component.version}.mapping.json`; - const schema = await this.reader.readFile(schemaFile, 'schemas'); - if (!schema.ok) { - return schema; - } - resultValue.mappings[component.name] = schema.value; - } - return { ok: true, value: resultValue }; - } - - /** - * Retrieves the data for a static file associated with the integration. - * - * @param staticPath The path of the static to retrieve. - * @returns A buffer with the static's data if present, otherwise null. - */ - async getStatic(staticPath: string): Promise> { - return await this.reader.readFileRaw(staticPath, 'static'); - } -} diff --git a/server/adaptors/integrations/repository/integration_reader.ts b/server/adaptors/integrations/repository/integration_reader.ts new file mode 100644 index 0000000000..0f28c5d420 --- /dev/null +++ b/server/adaptors/integrations/repository/integration_reader.ts @@ -0,0 +1,522 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +import path from 'path'; +import semver from 'semver'; +import { validateTemplate } from '../validators'; +import { FileSystemCatalogDataAdaptor } from './fs_data_adaptor'; +import { CatalogDataAdaptor, IntegrationPart } from './catalog_data_adaptor'; +import { foldResults, pruneConfig } from './utils'; + +/** + * The Integration class represents the data for Integration Templates. + * It is backed by the repository file system. 
+ * It includes accessor methods for integration configs, as well as helpers for nested components. + */ +export class IntegrationReader { + reader: CatalogDataAdaptor; + directory: string; + name: string; + + constructor(directory: string, reader?: CatalogDataAdaptor) { + this.directory = directory; + this.name = path.basename(directory); + this.reader = reader ?? new FileSystemCatalogDataAdaptor(directory); + } + + /** + * Retrieve data from correct source regardless of if reader is config-localized or not. + * + * TODO refactor to assemble filename from `type` instead of requiring caller to format it. + * + * @param item An item which may have data in it. + * @param fileParams Information about the file to read if the config is not localized. + * @param format How to package the returned data. + * If 'json', return `object | object[]`. If 'binary', return `Buffer`. + * @returns A result with the data, with a format based on the format field. + */ + private async fetchDataOrReadFile( + item: { data?: string }, + fileParams: { filename: string; type?: IntegrationPart }, + format: 'json' + ): Promise>; + private async fetchDataOrReadFile( + item: { data?: string }, + fileParams: { filename: string; type?: IntegrationPart }, + format: 'binary' + ): Promise>; + private async fetchDataOrReadFile( + item: { data?: string }, + fileParams: { filename: string; type?: IntegrationPart }, + format: 'json' | 'binary' + ): Promise> { + if (this.reader.isConfigLocalized) { + if (!item.data) { + return { + ok: false, + error: new Error( + 'The config for the provided reader is localized, but no data field is present. 
' + + JSON.stringify(item) + ), + }; + } + try { + if (format === 'json') { + return { ok: true, value: JSON.parse(item.data) }; + } else { + return { ok: true, value: Buffer.from(item.data, 'base64') }; + } + } catch (error) { + return { ok: false, error }; + } + } + + if (format === 'json') { + return this.reader.readFile(fileParams.filename, fileParams.type); + } else { + return this.reader.readFileRaw(fileParams.filename, fileParams.type); + } + } + + /** + * Get the latest version of the integration available. + * This method relies on the fact that integration configs have their versions in their name. + * Any files that don't match the config naming convention will be ignored. + * + * @returns A string with the latest version, or null if no versions are available. + */ + async getLatestVersion(): Promise { + const versions = await this.reader.findIntegrationVersions(); + if (!versions.ok) { + return null; + } + if (versions.value.length === 0) { + return null; + } + // Sort descending + versions.value.sort(semver.rcompare); + return versions.value[0]; + } + + // Get config without pruning or validation. + private async getRawConfig( + version?: string + ): Promise> { + if ((await this.reader.getDirectoryType()) !== 'integration') { + return { + ok: false, + error: new Error(`${this.directory} is not a valid integration directory`), + }; + } + + const maybeVersion: string | null = version ? version : await this.getLatestVersion(); + + if (maybeVersion === null) { + return { + ok: false, + error: new Error(`No valid config matching version ${version} is available`), + }; + } + + const configFile = `${this.name}-${maybeVersion}.json`; + + // Even config-localized readers must support config-read. + const config = await this.reader.readFile(configFile); + if (!config.ok) { + return config; + } + return validateTemplate(config.value); + } + + /** + * Get the configuration of the current integration. + * + * @param version The version of the config to retrieve. 
+ * @returns The config if a valid config matching the version is present, otherwise null. + */ + async getConfig(version?: string): Promise> { + const maybeConfig = await this.getRawConfig(version); + if (!maybeConfig.ok) { + return maybeConfig; + } + return validateTemplate(pruneConfig(maybeConfig.value)); + } + + private async getQueries( + queriesList: Array<{ name: string; version: string; language: string; data?: string }> + ): Promise>> { + const queries = await Promise.all( + queriesList.map(async (item) => { + const query = await this.fetchDataOrReadFile( + item, + { filename: `${item.name}-${item.version}.${item.language}`, type: 'assets' }, + 'binary' + ); + if (!query.ok) { + return query; + } + return { + ok: true as const, + value: { + language: item.language, + query: query.value.toString('utf8'), + }, + }; + }) + ); + return foldResults(queries); + } + + /** + * Retrieve assets associated with the integration. + * This method greedily retrieves all assets. + * If the version is invalid, an error is thrown. + * If an asset is invalid, it will be skipped. + * + * @param version The version of the integration to retrieve assets for. + * @returns An object containing the different types of assets. 
+ */ + async getAssets( + version?: string + ): Promise< + Result<{ + savedObjects?: object[]; + queries?: Array<{ + query: string; + language: string; + }>; + }> + > { + const configResult = await this.getRawConfig(version); + if (!configResult.ok) { + return configResult; + } + const config = configResult.value; + + const resultValue: { + savedObjects?: object[]; + queries?: Array<{ query: string; language: string }>; + } = {}; + if (config.assets.savedObjects) { + const assets = await this.fetchDataOrReadFile( + config.assets.savedObjects as { data?: string }, + { + filename: `${config.assets.savedObjects.name}-${config.assets.savedObjects.version}.ndjson`, + type: 'assets', + }, + 'json' + ); + if (!assets.ok) { + return assets; + } + resultValue.savedObjects = assets.value as object[]; + } + if (config.assets.queries) { + const queries = await this.getQueries(config.assets.queries); + if (!queries.ok) { + return queries; + } + resultValue.queries = queries.value; + } + return { ok: true, value: resultValue }; + } + + /** + * Retrieve sample data associated with the integration. + * If the version is invalid, an error is thrown. + * If the sample data is invalid, null will be returned + * + * @param version The version of the integration to retrieve assets for. + * @returns An object containing a list of sample data with adjusted timestamps. 
+ */ + async getSampleData( + version?: string + ): Promise< + Result<{ + sampleData: object[] | null; + }> + > { + const configResult = await this.getRawConfig(version); + if (!configResult.ok) { + return configResult; + } + const config = configResult.value; + + const resultValue: { sampleData: object[] | null } = { sampleData: null }; + if (config.sampleData) { + const jsonContent: Result = await this.fetchDataOrReadFile( + config.sampleData as { data?: string }, + { filename: config.sampleData.path, type: 'data' }, + 'json' + ); + if (!jsonContent.ok) { + return jsonContent; + } + for (const value of jsonContent.value as object[]) { + if (!('@timestamp' in value)) { + continue; + } + // Randomly scatter timestamps across last 10 minutes + // Assume for now that the ordering of events isn't important, can change to a sequence if needed + // Also doesn't handle fields like `observedTimestamp` if present + const newTime = new Date( + Date.now() - Math.floor(Math.random() * 1000 * 60 * 10) + ).toISOString(); + Object.assign(value, { '@timestamp': newTime }); + if ('observedTimestamp' in value) { + Object.assign(value, { observedTimestamp: newTime }); + } + } + resultValue.sampleData = jsonContent.value as object[]; + } + return { ok: true, value: resultValue }; + } + + /** + * Retrieve schema data associated with the integration. + * This method greedily retrieves all mappings and schemas. + * It's assumed that a valid version will be provided. + * If the version is invalid, an error is thrown. + * If a schema is invalid, an error will be thrown. + * + * @param version The version of the integration to retrieve assets for. + * @returns An object containing the different types of assets. 
+ */ + async getSchemas( + version?: string + ): Promise< + Result<{ + mappings: { [key: string]: unknown }; + }> + > { + const configResult = await this.getRawConfig(version); + if (!configResult.ok) { + return configResult; + } + const config = configResult.value; + + const resultValue: { mappings: { [key: string]: object } } = { + mappings: {}, + }; + for (const component of config.components) { + const schemaFile = `${component.name}-${component.version}.mapping.json`; + const schema = await this.fetchDataOrReadFile( + component as { data?: string }, + { filename: schemaFile, type: 'schemas' }, + 'json' + ); + if (!schema.ok) { + return schema; + } + resultValue.mappings[component.name] = schema.value; + } + return { ok: true, value: resultValue }; + } + + /** + * Retrieves the data for a static file associated with the integration. + * + * @param staticPath The path of the static to retrieve. + * @returns A buffer with the static's data if present, otherwise null. + */ + async getStatic(staticPath: string): Promise> { + // Statics were originally designed to read straight from file system, + // so we use direct access if possible. + if (!this.reader.isConfigLocalized) { + return await this.reader.readFileRaw(staticPath, 'static'); + } + + // Otherwise, we need to search for the right static, by checking each version. 
+ const versions = await this.reader.findIntegrationVersions(); + if (!versions.ok) { + return versions; + } + for (const version of versions.value) { + const config = await this.getRawConfig(version); + if (!config.ok || !config.value.statics) { + continue; + } + const statics = config.value.statics; + if (statics.logo?.path === staticPath) { + if (!('data' in statics.logo)) { + return { ok: false, error: new Error('Localized config missing static data') }; + } + return { ok: true, value: Buffer.from((statics.logo as { data: string }).data, 'base64') }; + } + if (statics?.darkModeLogo?.path === staticPath) { + if (!('data' in statics.darkModeLogo)) { + return { ok: false, error: new Error('Localized config missing static data') }; + } + return { + ok: true, + value: Buffer.from((statics.darkModeLogo as { data: string }).data, 'base64'), + }; + } + for (const iterStatic of [...(statics?.gallery ?? []), ...(statics?.darkModeGallery ?? [])]) { + if (iterStatic.path === staticPath) { + if (!('data' in iterStatic)) { + return { ok: false, error: new Error('Localized config missing static data') }; + } + return { ok: true, value: Buffer.from((iterStatic as { data: string }).data, 'base64') }; + } + } + } + + return { + ok: false, + error: new Error(`Static not found: ${staticPath}`, { code: 'ENOENT' } as ErrorOptions), + }; + } + + private async serializeStaticAsset(asset: StaticAsset): Promise> { + const data = await this.getStatic(asset.path); + if (!data.ok) { + return data; + } + + return { + ok: true, + value: { + ...asset, + data: data.value.toString('base64'), + }, + }; + } + + private async serializeStatics( + statics: IntegrationStatics + ): Promise> { + const serialized: SerializedIntegrationStatics = {}; + + if (statics.logo) { + const serializeResult = await this.serializeStaticAsset(statics.logo); + serialized.logo = serializeResult.value; + } + + if (statics.darkModeLogo) { + const serializeResult = await this.serializeStaticAsset(statics.darkModeLogo); + 
serialized.darkModeLogo = serializeResult.value; + } + + if (statics.gallery) { + const results = await Promise.all( + statics.gallery.map((asset) => this.serializeStaticAsset(asset)) + ); + const foldedResult = foldResults(results); + serialized.gallery = foldedResult.value; + } + + if (statics.darkModeGallery) { + const results = await Promise.all( + statics.darkModeGallery.map((asset) => this.serializeStaticAsset(asset)) + ); + const foldedResult = foldResults(results); + serialized.darkModeGallery = foldedResult.value; + } + + return { + ok: true, + value: serialized, + }; + } + + /** + * Serialize the referenced integration as a flat JSON object. + * Useful for normalizing the format for sending to other locations. + * This method implements the serialization scheme expected by `JsonCatalogDataAdaptor`. + * + * @param version The version of the integration to serialize. + * @returns A large object which includes all of the integration's data. + */ + async serialize(version?: string): Promise> { + const configResult = await this.getRawConfig(version); + if (!configResult.ok) { + return configResult; + } + + // Type cast safety: all serializable properties must have the 'data' field. + // The remainder of the method is populating all such fields. 
+ const config = configResult.value as SerializedIntegration; + + const componentResults = await Promise.all( + config.components.map((component) => + this.fetchDataOrReadFile( + component, + { filename: `${component.name}-${component.version}.mapping.json`, type: 'schemas' }, + 'json' + ) + ) + ); + const componentsResult = foldResults(componentResults); + if (!componentsResult.ok) { + return componentsResult; + } + config.components = config.components.map((component, idx) => { + return { + ...component, + data: JSON.stringify(componentsResult.value[idx]), + }; + }); + + if (config.assets.savedObjects) { + const soMetadata = config.assets.savedObjects; + const soResult = await this.fetchDataOrReadFile( + config.assets.savedObjects, + { + filename: `${soMetadata.name}-${soMetadata.version}.ndjson`, + type: 'assets', + }, + 'json' + ); + if (!soResult.ok) { + return soResult; + } + config.assets.savedObjects = { ...soMetadata, data: JSON.stringify(soResult.value) }; + } + + if (config.assets.queries) { + const queryResults = await Promise.all( + config.assets.queries.map((query) => + this.fetchDataOrReadFile( + query, + { filename: `${query.name}-${query.version}.${query.language}`, type: 'assets' }, + 'binary' + ) + ) + ); + const queriesResult = foldResults(queryResults); + if (!queriesResult.ok) { + return queriesResult; + } + config.assets.queries = config.assets.queries.map((query, idx) => { + return { + ...query, + data: JSON.stringify(queriesResult.value[idx].toString('utf8')), + }; + }); + } + + if (config.statics) { + const staticsResult = await this.serializeStatics(config.statics); + if (!staticsResult.ok) { + return staticsResult; + } + config.statics = staticsResult.value; + } + + if (config.sampleData) { + const dataResult = await this.getSampleData(version); + if (!dataResult.ok) { + return dataResult; + } + config.sampleData = { + ...config.sampleData, + data: JSON.stringify(dataResult.value.sampleData), + }; + } + + return { ok: true, value: config 
}; + } +} diff --git a/server/adaptors/integrations/repository/json_data_adaptor.ts b/server/adaptors/integrations/repository/json_data_adaptor.ts new file mode 100644 index 0000000000..05c0b11104 --- /dev/null +++ b/server/adaptors/integrations/repository/json_data_adaptor.ts @@ -0,0 +1,97 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +import { CatalogDataAdaptor, IntegrationPart } from './catalog_data_adaptor'; + +/** + * A CatalogDataAdaptor that reads from a provided list of JSON objects. + * Used to read Integration information when the user uploads their own catalog. + */ +export class JsonCatalogDataAdaptor implements CatalogDataAdaptor { + isConfigLocalized = true; + integrationsList: SerializedIntegration[]; + + /** + * Creates a new FileSystemCatalogDataAdaptor instance. + * + * @param directory The base directory from which to read files. This is not sanitized. + */ + constructor(integrationsList: SerializedIntegration[]) { + this.integrationsList = integrationsList; + } + + async findIntegrationVersions(dirname?: string | undefined): Promise> { + const versions: string[] = []; + for (const integration of this.integrationsList) { + if (dirname && integration.name !== dirname) { + continue; + } + versions.push(integration.version); + } + return { ok: true, value: versions }; + } + + async readFile(filename: string, type?: IntegrationPart): Promise> { + if (type !== undefined) { + return { + ok: false, + error: new Error('JSON adaptor does not support subtypes (isConfigLocalized: true)'), + }; + } + + const name = filename.split('-')[0]; + const version = filename.match(/\d+(\.\d+)*/); + for (const integ of this.integrationsList) { + if (integ.name === name && integ.version === version?.[0]) { + return { ok: true, value: integ }; + } + } + return { ok: false, error: new Error('Config file not found: ' + filename) }; + } + + async readFileRaw(_filename: string, _type?: IntegrationPart): Promise> { + return { + 
ok: false, + error: new Error('JSON adaptor does not support raw files (isConfigLocalized: true)'), + }; + } + + async findIntegrations(dirname: string = '.'): Promise> { + if (dirname !== '.') { + return { + ok: false, + error: new Error('Finding integrations for custom dirs not supported for JSONreader'), + }; + } + const result: Set = new Set([]); + for (const integration of this.integrationsList) { + result.add(integration.name); + } + return { ok: true, value: [...result] }; + } + + async getDirectoryType(dirname?: string): Promise<'integration' | 'repository' | 'unknown'> { + // First, filter list by dirname if available + const integrationsList = dirname + ? this.integrationsList.filter((i) => i.name === dirname) + : this.integrationsList; + if (integrationsList.length === 0) { + return 'unknown'; + } + // The list is an integration iff all of its names match + for (let i = 0; i < integrationsList.length - 1; i++) { + if (integrationsList[i].name !== integrationsList[i + 1].name) { + return 'repository'; + } + } + return 'integration'; + } + + join(filename: string): JsonCatalogDataAdaptor { + // In other adaptors, joining moves from directories to integrations. + // Since for JSON catalogs we use a flat structure, we just filter. 
+ return new JsonCatalogDataAdaptor(this.integrationsList.filter((i) => i.name === filename)); + } +} diff --git a/server/adaptors/integrations/repository/repository.ts b/server/adaptors/integrations/repository/repository.ts index ca56767b0d..0337372049 100644 --- a/server/adaptors/integrations/repository/repository.ts +++ b/server/adaptors/integrations/repository/repository.ts @@ -4,8 +4,9 @@ */ import * as path from 'path'; -import { IntegrationReader } from './integration'; +import { IntegrationReader } from './integration_reader'; import { FileSystemCatalogDataAdaptor } from './fs_data_adaptor'; +import { CatalogDataAdaptor } from './catalog_data_adaptor'; export class TemplateManager { reader: CatalogDataAdaptor; diff --git a/server/adaptors/integrations/repository/utils.ts b/server/adaptors/integrations/repository/utils.ts new file mode 100644 index 0000000000..b9ee28c7a5 --- /dev/null +++ b/server/adaptors/integrations/repository/utils.ts @@ -0,0 +1,95 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +import { Readable } from 'stream'; +import { createSavedObjectsStreamFromNdJson } from '../../../../../../src/core/server/saved_objects/routes/utils'; +import { IntegrationReader } from './integration_reader'; + +export async function tryParseNDJson(content: string): Promise { + try { + const objects = await createSavedObjectsStreamFromNdJson(Readable.from(content)); + return await objects.toArray(); + } catch (err) { + return null; + } +} + +/** + * Check IntegrationReader nested dependencies for validity, + * as a supplement to shallow config validation. + * + * @returns a Result indicating whether the integration is valid, holding the integration's config. 
+ */ +export async function deepCheck(reader: IntegrationReader): Promise> { + const configResult = await reader.getConfig(); + if (!configResult.ok) { + return configResult; + } + + // Deep checks not included in default config validation + const assets = await reader.getAssets(); + if (!assets.ok || Object.keys(assets.value).length === 0) { + return { ok: false, error: new Error('An integration must have at least one asset') }; + } + + return configResult; +} + +/** + * Helper method: Convert an Array> to Result>. + * + * @param results The list of results to fold. + * @returns A single result object with values in an array, or an error result. + */ +export const foldResults = (results: Array>) => + results.reduce( + (result, currentValue) => { + if (!result.ok) { + return result; + } + if (!currentValue.ok) { + return currentValue; + } + result.value.push(currentValue.value); + return result; + }, + { ok: true, value: [] } as Result + ); + +/** + * Remove all fields from SerializedIntegration not present in IntegrationConfig. + * + * @param rawConfig The raw config to prune + * @returns A config with all data fields removed + */ +export const pruneConfig = ( + rawConfig: IntegrationConfig | SerializedIntegration +): IntegrationConfig => { + // Hacky workaround: we currently only need to prune 'data' fields, so just remove every 'data'. + // Lots of risky conversion in this method, so scope it to here and rewrite if more granular + // pruning is needed. 
+ const prunePart = (part: T): T => { + const result = {} as { [key: string]: unknown }; + for (const [key, value] of Object.entries(part as { [key: string]: unknown })) { + if (key === 'data') { + continue; + } else if (Array.isArray(value)) { + result[key] = value.map((item) => { + if (item instanceof Object && item !== null) { + return prunePart(item); + } + return item; + }); + } else if (value instanceof Object && value !== null) { + result[key] = prunePart(value as { [key: string]: unknown }); + } else { + result[key] = value; + } + } + return (result as unknown) as T; + }; + + return prunePart(rawConfig); +}; diff --git a/server/adaptors/integrations/types.ts b/server/adaptors/integrations/types.ts index fd5729afcc..5e7565a133 100644 --- a/server/adaptors/integrations/types.ts +++ b/server/adaptors/integrations/types.ts @@ -3,19 +3,9 @@ * SPDX-License-Identifier: Apache-2.0 */ -type Result = { ok: true; value: T } | { ok: false; error: E }; - -interface IntegrationAssets { - savedObjects?: { - name: string; - version: string; - }; - queries?: Array<{ - name: string; - version: string; - language: string; - }>; -} +type Result = + | { ok: true; value: T; error?: undefined } + | { ok: false; error: E; value?: undefined }; interface IntegrationConfig { name: string; @@ -27,12 +17,7 @@ interface IntegrationConfig { author?: string; description?: string; sourceUrl?: string; - statics?: { - logo?: StaticAsset; - gallery?: StaticAsset[]; - darkModeLogo?: StaticAsset; - darkModeGallery?: StaticAsset[]; - }; + statics?: IntegrationStatics; components: IntegrationComponent[]; assets: IntegrationAssets; sampleData?: { @@ -40,16 +25,75 @@ interface IntegrationConfig { }; } +// IntegrationConfig extended with local copies of all data +interface SerializedIntegration extends IntegrationConfig { + statics?: SerializedIntegrationStatics; + components: SerializedIntegrationComponent[]; + assets: SerializedIntegrationAssets; + sampleData?: { + path: string; + data: string; + 
}; +} + +interface IntegrationStatics { + logo?: StaticAsset; + gallery?: StaticAsset[]; + darkModeLogo?: StaticAsset; + darkModeGallery?: StaticAsset[]; +} + +interface SerializedIntegrationStatics { + logo?: SerializedStaticAsset; + gallery?: SerializedStaticAsset[]; + darkModeLogo?: SerializedStaticAsset; + darkModeGallery?: SerializedStaticAsset[]; +} + +interface IntegrationAssets { + savedObjects?: { + name: string; + version: string; + }; + queries?: Array<{ + name: string; + version: string; + language: string; + }>; +} + +interface SerializedIntegrationAssets extends IntegrationAssets { + savedObjects?: { + name: string; + version: string; + data: string; + }; + queries?: Array<{ + name: string; + version: string; + language: string; + data: string; + }>; +} + interface StaticAsset { annotation?: string; path: string; } +interface SerializedStaticAsset extends StaticAsset { + data: string; +} + interface IntegrationComponent { name: string; version: string; } +interface SerializedIntegrationComponent extends IntegrationComponent { + data: string; +} + interface DisplayAsset { body: string; } diff --git a/server/adaptors/integrations/validators.ts b/server/adaptors/integrations/validators.ts index 5aa65cf815..bd0ec482fc 100644 --- a/server/adaptors/integrations/validators.ts +++ b/server/adaptors/integrations/validators.ts @@ -12,6 +12,7 @@ const staticAsset: JSONSchemaType = { properties: { path: { type: 'string' }, annotation: { type: 'string', nullable: true }, + data: { type: 'string', nullable: true }, }, required: ['path'], additionalProperties: false, @@ -48,6 +49,7 @@ const templateSchema: JSONSchemaType = { properties: { name: { type: 'string' }, version: { type: 'string' }, + data: { type: 'string', nullable: true }, }, required: ['name', 'version'], }, @@ -60,6 +62,7 @@ const templateSchema: JSONSchemaType = { properties: { name: { type: 'string' }, version: { type: 'string' }, + data: { type: 'string', nullable: true }, }, required: ['name', 
'version'], nullable: true, @@ -73,6 +76,7 @@ const templateSchema: JSONSchemaType = { name: { type: 'string' }, version: { type: 'string' }, language: { type: 'string' }, + data: { type: 'string', nullable: true }, }, required: ['name', 'version', 'language'], }, @@ -84,9 +88,8 @@ const templateSchema: JSONSchemaType = { sampleData: { type: 'object', properties: { - path: { - type: 'string', - }, + path: { type: 'string' }, + data: { type: 'string', nullable: true }, }, required: ['path'], additionalProperties: false,