Skip to content
This repository was archived by the owner on Jul 9, 2025. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
16 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import { LabelResolver, Utility, Orchestrator } from '@microsoft/bf-orchestrator';
import { pathExists, readdir, readJson } from 'fs-extra';

import { cache, warmUpCache } from '../process/orchestratorWorker';
import { cache, handleMessage, warmUpCache } from '../process/orchestratorWorker';

jest.mock('@microsoft/bf-orchestrator');
jest.mock('fs-extra', () => ({
Expand Down Expand Up @@ -158,4 +158,28 @@ describe('Orchestrator Warmup Cache', () => {
false
);
});

it('process.send is called with error message back to orchestratorBuilder', () => {
  // Force the warmup path to throw so handleMessage reaches its catch branch,
  // which is expected to report the failure back over IPC via process.send.
  (Orchestrator.getLabelResolversAsync as jest.Mock).mockImplementationOnce(() => {
    throw new Error('Something bad happened');
  });

  // Stub the implementation: jest runs tests in child-process workers where
  // process.send is a live IPC channel — a bare spyOn would forward the error
  // payload to the jest parent process. Returning true matches Node's
  // process.send signature.
  const processSpy = jest.spyOn(process, 'send').mockImplementation(() => true);

  return handleMessage({
    id: '1',
    payload: {
      type: 'warmup',
      projectId: 'abc',
      modelPath: './modelPath',
      files: [],
      generatedFolderPath: './generatedFolder',
    },
  })
    .then(() => {
      // The worker serializes the error (message + stack) rather than sending
      // the Error instance, which would not survive IPC structured cloning.
      expect(processSpy).toHaveBeenCalledWith({
        error: { message: 'Something bad happened', stack: expect.anything() },
        id: '1',
      });
    })
    .finally(() => {
      // Restore process.send so the stub does not leak into other tests.
      processSpy.mockRestore();
    });
});
});
Original file line number Diff line number Diff line change
Expand Up @@ -86,26 +86,36 @@ export async function warmUpCache(generatedFolderPath: string, projectId: string
{ model: orchestratorSettings?.orchestrator?.models?.multilang, lang: 'multilang', luFiles: multiLangLuFiles },
];

const [enMap, multilangMap] = await Promise.all(
modelDatas.map(async (modelData) => {
const snapshotData = await Promise.all(
modelData.luFiles.map(
async (f) =>
[f.replace('.blu', '.lu'), new Uint8Array(await readFile(Path.join(generatedFolderPath, f)))] as [
string,
Uint8Array
]
)
const mergedLabelResolverMap: Map<string, LabelResolver> = new Map();

for (const modelData of modelDatas) {
const snapshotData: [string, Uint8Array][] = await Promise.all(
modelData.luFiles.map(
async (f) =>
[f.replace('.blu', '.lu'), new Uint8Array(await readFile(Path.join(generatedFolderPath, f)))] as [
string,
Uint8Array
]
)
);

if (modelData.model && snapshotData.length) {
//getLabelResolversAsync cannot be run concurrently. Do not wrap in promise.all, it crashes
//the worker and there is no recovery from this.
const labelResolverMap = await Orchestrator.getLabelResolversAsync(
modelData.model,
'',
new Map(snapshotData),
false
);

return modelData.model && snapshotData.length
? await Orchestrator.getLabelResolversAsync(modelData.model, '', new Map(snapshotData), false)
: new Map<string, LabelResolver>();
})
);

cache.set(projectId, new Map([...enMap, ...multilangMap]));

if (labelResolverMap) {
for (const [key, labelResolver] of labelResolverMap.entries()) {
mergedLabelResolverMap.set(key, labelResolver);
}
}
}
}
cache.set(projectId, mergedLabelResolverMap);
return true;
}

Expand Down Expand Up @@ -177,7 +187,7 @@ export async function writeSnapshot(output: IOrchestratorBuildOutput, generatedF
return snapshots;
}

const handleMessage = async (msg: RequestMsg) => {
export const handleMessage = async (msg: RequestMsg) => {
const { payload } = msg;
try {
switch (payload.type) {
Expand All @@ -196,14 +206,10 @@ const handleMessage = async (msg: RequestMsg) => {
}
}
} catch (error) {
return { id: msg.id, error };
process.send?.({ id: msg.id, error: error ? { message: error.message, stack: error.stack } : undefined });
}
};

process.on('message', async (msg: RequestMsg) => {
try {
handleMessage(msg);
} catch (error) {
process.send?.({ id: msg.id, error });
}
handleMessage(msg);
});