@@ -14,6 +14,7 @@ import { createAgentInput } from './agent';
import { createPipeline } from './pipeline';
import { DataStream, Docs, InputType, Pipeline, Integration } from '../../common';
import yaml from 'js-yaml';
import { createReadme } from './readme_files';

const mockedDataPath = 'path';
const mockedId = 123;
@@ -23,6 +24,10 @@ jest.mock('./data_stream');
jest.mock('./fields');
jest.mock('./agent');
jest.mock('./pipeline');
jest.mock('./readme_files');

(createFieldMapping as jest.Mock).mockReturnValue([]);
(createDataStream as jest.Mock).mockReturnValue([]);

(generateUniqueId as jest.Mock).mockReturnValue(mockedId);

@@ -106,22 +111,11 @@ describe('buildPackage', () => {

// _dev files
expect(ensureDirSync).toHaveBeenCalledWith(`${integrationPath}/_dev/build`);
expect(createSync).toHaveBeenCalledWith(
`${integrationPath}/_dev/build/docs/README.md`,
expect.any(String)
);
expect(createSync).toHaveBeenCalledWith(
`${integrationPath}/_dev/build/build.yml`,
expect.any(String)
);

// Docs files
expect(ensureDirSync).toHaveBeenCalledWith(`${integrationPath}/docs/`);
expect(createSync).toHaveBeenCalledWith(
`${integrationPath}/docs/README.md`,
expect.any(String)
);

// Changelog file
expect(createSync).toHaveBeenCalledWith(`${integrationPath}/changelog.yml`, expect.any(String));

@@ -188,6 +182,52 @@ describe('buildPackage', () => {
secondDataStreamDocs
);
});

it('Should call createReadme once with sorted fields', async () => {
jest.clearAllMocks();

const firstDSFieldsMapping = [{ name: 'name a', description: 'description 1', type: 'type 1' }];

const firstDataStreamFields = [
{ name: 'name b', description: 'description 1', type: 'type 1' },
];

const secondDSFieldsMapping = [
{ name: 'name c', description: 'description 2', type: 'type 2' },
{ name: 'name e', description: 'description 3', type: 'type 3' },
];

const secondDataStreamFields = [
{ name: 'name d', description: 'description 2', type: 'type 2' },
];

(createFieldMapping as jest.Mock).mockReturnValueOnce(firstDSFieldsMapping);
(createDataStream as jest.Mock).mockReturnValueOnce(firstDataStreamFields);

(createFieldMapping as jest.Mock).mockReturnValueOnce(secondDSFieldsMapping);
(createDataStream as jest.Mock).mockReturnValueOnce(secondDataStreamFields);

await buildPackage(testIntegration);

expect(createReadme).toHaveBeenCalledWith(integrationPath, testIntegration.name, [
{
datastream: firstDatastreamName,
fields: [
{ name: 'name a', description: 'description 1', type: 'type 1' },
{ name: 'name b', description: 'description 1', type: 'type 1' },
],
},
{
datastream: secondDatastreamName,
fields: [
{ name: 'name c', description: 'description 2', type: 'type 2' },
{ name: 'name d', description: 'description 2', type: 'type 2' },
{ name: 'name e', description: 'description 3', type: 'type 3' },
],
},
]);
});
});

describe('renderPackageManifestYAML', () => {
@@ -16,6 +16,8 @@ import { createAgentInput } from './agent';
import { createDataStream } from './data_stream';
import { createFieldMapping } from './fields';
import { createPipeline } from './pipeline';
import { createReadme } from './readme_files';
import { Field, flattenObjectsList } from '../util/samples';

const initialVersion = '1.0.0';

@@ -37,17 +39,27 @@ export async function buildPackage(integration: Integration): Promise<Buffer> {
const packageDir = createDirectories(workingDir, integration, packageDirectoryName);

const dataStreamsDir = joinPath(packageDir, 'data_stream');

for (const dataStream of integration.dataStreams) {
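// Collect, per data stream, the field definitions produced below so createReadme can list them.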
const fieldsPerDatastream = integration.dataStreams.map((dataStream) => {
const dataStreamName = dataStream.name;
const specificDataStreamDir = joinPath(dataStreamsDir, dataStreamName);

createDataStream(integration.name, specificDataStreamDir, dataStream);
const dataStreamFields = createDataStream(integration.name, specificDataStreamDir, dataStream);
createAgentInput(specificDataStreamDir, dataStream.inputTypes);
createPipeline(specificDataStreamDir, dataStream.pipeline);
createFieldMapping(integration.name, dataStreamName, specificDataStreamDir, dataStream.docs);
}
const fields = createFieldMapping(
integration.name,
dataStreamName,
specificDataStreamDir,
dataStream.docs
);

return {
datastream: dataStreamName,
fields: mergeAndSortFields(fields, dataStreamFields),
};
});

createReadme(packageDir, integration.name, fieldsPerDatastream);
const zipBuffer = await createZipArchive(workingDir, packageDirectoryName);

removeDirSync(workingDir);
@@ -67,7 +79,6 @@ function createDirectories(
}

function createPackage(packageDir: string, integration: Integration): void {
createReadme(packageDir, integration);
createChangelog(packageDir);
createBuildFile(packageDir);
createPackageManifest(packageDir, integration);
@@ -102,20 +113,6 @@ function createChangelog(packageDir: string): void {
createSync(joinPath(packageDir, 'changelog.yml'), changelogTemplate);
}

function createReadme(packageDir: string, integration: Integration) {
const readmeDirPath = joinPath(packageDir, '_dev/build/docs/');
const mainReadmeDirPath = joinPath(packageDir, 'docs/');
ensureDirSync(mainReadmeDirPath);
ensureDirSync(readmeDirPath);
const readmeTemplate = nunjucks.render('package_readme.md.njk', {
package_name: integration.name,
data_streams: integration.dataStreams,
});

createSync(joinPath(readmeDirPath, 'README.md'), readmeTemplate);
createSync(joinPath(mainReadmeDirPath, 'README.md'), readmeTemplate);
}

async function createZipArchive(workingDir: string, packageDirectoryName: string): Promise<Buffer> {
const tmpPackageDir = joinPath(workingDir, packageDirectoryName);
const zip = new AdmZip();
@@ -124,6 +121,12 @@ async function createZipArchive(workingDir: string, packageDirectoryName: string
return buffer;
}

function mergeAndSortFields(fields: Field[], dataStreamFields: Field[]): Field[] {
const mergedFields = [...fields, ...dataStreamFields];

return flattenObjectsList(mergedFields);
}
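// Rough illustration, assuming flattenObjectsList flattens nested object fields and sorts the
// result by name (as the build_integration test above expects):
//   mergeAndSortFields(
//     [{ name: 'name c', type: 'type 2' }, { name: 'name e', type: 'type 3' }],
//     [{ name: 'name d', type: 'type 2' }]
//   );
//   // => entries ordered 'name c', 'name d', 'name e'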

/* eslint-disable @typescript-eslint/naming-convention */
/**
* Creates a package manifest dictionary.
@@ -81,4 +81,16 @@ describe('createDataStream', () => {
expect(render).toHaveBeenCalledWith(`filestream_manifest.yml.njk`, expect.anything());
expect(render).toHaveBeenCalledWith(`azure_eventhub_manifest.yml.njk`, expect.anything());
});

it('Should return the list of fields', async () => {
const fields = createDataStream(packageName, dataStreamPath, firstDataStream);

expect(Array.isArray(fields)).toBe(true);
fields.forEach((field) => {
expect(field).toMatchObject({
name: expect.any(String),
type: expect.any(String),
});
});
});
});
@@ -7,14 +7,16 @@

import nunjucks from 'nunjucks';
import { join as joinPath } from 'path';
import { load } from 'js-yaml';
import type { DataStream } from '../../common';
import { copySync, createSync, ensureDirSync, listDirSync } from '../util';
import { copySync, createSync, ensureDirSync, listDirSync, readSync } from '../util';
import { Field } from '../util/samples';

export function createDataStream(
packageName: string,
specificDataStreamDir: string,
dataStream: DataStream
): void {
): Field[] {
const dataStreamName = dataStream.name;
const pipelineDir = joinPath(specificDataStreamDir, 'elasticsearch', 'ingest_pipeline');
const title = dataStream.title;
@@ -23,7 +25,7 @@ export function createDataStream(
const useMultilineNDJSON = samplesFormat.name === 'ndjson' && samplesFormat.multiline === true;

ensureDirSync(specificDataStreamDir);
createDataStreamFolders(specificDataStreamDir, pipelineDir);
const fields = createDataStreamFolders(specificDataStreamDir, pipelineDir);
createPipelineTests(specificDataStreamDir, dataStream.rawSamples, packageName, dataStreamName);

const dataStreams: string[] = [];
@@ -51,19 +53,34 @@
});

createSync(joinPath(specificDataStreamDir, 'manifest.yml'), finalManifest);

return fields;
}

function createDataStreamFolders(specificDataStreamDir: string, pipelineDir: string): Field[] {
ensureDirSync(pipelineDir);
return copyFilesFromTemplateDir(specificDataStreamDir);
}

function createDataStreamFolders(specificDataStreamDir: string, pipelineDir: string): void {
function copyFilesFromTemplateDir(specificDataStreamDir: string): Field[] {
const dataStreamTemplatesDir = joinPath(__dirname, '../templates/data_stream');
const items = listDirSync(dataStreamTemplatesDir);
return items.flatMap((item) => {
const sourcePath = joinPath(dataStreamTemplatesDir, item);
const destinationPath = joinPath(specificDataStreamDir, item);
copySync(sourcePath, destinationPath);
const files = listDirSync(sourcePath);

for (const item of items) {
const s = joinPath(dataStreamTemplatesDir, item);
const d = joinPath(specificDataStreamDir, item);
copySync(s, d);
}
return loadFieldsFromFiles(sourcePath, files);
});
}

ensureDirSync(pipelineDir);
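// Each template file is assumed to hold valid fields YAML; js-yaml's load parses it into
// Field definitions that createDataStream returns to the caller.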
function loadFieldsFromFiles(sourcePath: string, files: string[]): Field[] {
return files.flatMap((file) => {
const filePath = joinPath(sourcePath, file);
const content = readSync(filePath);
return load(content) as Field[];
});
}

function createPipelineTests(
@@ -65,4 +65,38 @@ describe('createFieldMapping', () => {
);
expect(createSync).toHaveBeenCalledWith(`${dataStreamPath}/fields/fields.yml`, expectedFields);
});

it('Should return all fields flattened', async () => {
const docs: Docs = [
{
key: 'foo',
anotherKey: 'bar',
},
];

const basedFields = `- name: data_stream.type
  type: constant_keyword
  description: Data stream type.
- name: data_stream.dataset
  type: constant_keyword
- name: "@timestamp"
  type: date
  description: Event timestamp.
`;
(render as jest.Mock).mockReturnValue(basedFields);

const fieldsResult = createFieldMapping(packageName, dataStreamName, dataStreamPath, docs);

expect(fieldsResult).toEqual([
{
name: 'data_stream.type',
type: 'constant_keyword',
description: 'Data stream type.',
},
{ name: 'data_stream.dataset', type: 'constant_keyword' },
{ name: '@timestamp', type: 'date', description: 'Event timestamp.' },
{ name: 'key', type: 'keyword' },
{ name: 'anotherKey', type: 'keyword' },
]);
});
});
@@ -6,36 +6,42 @@
*/

import nunjucks from 'nunjucks';

import { load } from 'js-yaml';
import { Field } from '../util/samples';
import { createSync, generateFields, mergeSamples } from '../util';

export function createFieldMapping(
packageName: string,
dataStreamName: string,
specificDataStreamDir: string,
docs: object[]
): void {
): Field[] {
const dataStreamFieldsDir = `${specificDataStreamDir}/fields`;
createBaseFields(dataStreamFieldsDir, packageName, dataStreamName);
createCustomFields(dataStreamFieldsDir, docs);
const baseFields = createBaseFields(dataStreamFieldsDir, packageName, dataStreamName);
const customFields = createCustomFields(dataStreamFieldsDir, docs);

return [...baseFields, ...customFields];
}

function createBaseFields(
dataStreamFieldsDir: string,
packageName: string,
dataStreamName: string
): void {
): Field[] {
const datasetName = `${packageName}.${dataStreamName}`;
const baseFields = nunjucks.render('base_fields.yml.njk', {
module: packageName,
dataset: datasetName,
});

createSync(`${dataStreamFieldsDir}/base-fields.yml`, baseFields);

return load(baseFields) as Field[];
}

function createCustomFields(dataStreamFieldsDir: string, pipelineResults: object[]): void {
function createCustomFields(dataStreamFieldsDir: string, pipelineResults: object[]): Field[] {
const mergedResults = mergeSamples(pipelineResults);
const fieldKeys = generateFields(mergedResults);
createSync(`${dataStreamFieldsDir}/fields.yml`, fieldKeys);

return load(fieldKeys) as Field[];
}
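// Example mirroring the test above: for docs [{ key: 'foo', anotherKey: 'bar' }],
// createCustomFields is expected to yield
//   [{ name: 'key', type: 'keyword' }, { name: 'anotherKey', type: 'keyword' }].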