Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 17 additions & 5 deletions src/core/public/saved_objects/saved_objects_client.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -148,13 +148,15 @@ describe('SavedObjectsClient', () => {
});

describe('#resolve', () => {
beforeEach(() => {
function mockResolvedObjects(...objects: Array<Record<string, unknown>>) {
http.fetch.mockResolvedValue({
resolved_objects: [
{ saved_object: doc, outcome: 'conflict', alias_target_id: 'another-id' },
],
resolved_objects: objects.map((obj) => ({
saved_object: obj,
outcome: 'conflict',
alias_target_id: 'another-id',
})),
});
});
}

test('rejects if `type` parameter is undefined', () => {
return expect(
Expand All @@ -176,6 +178,7 @@ describe('SavedObjectsClient', () => {
});

test('makes HTTP call', async () => {
mockResolvedObjects(doc);
await savedObjectsClient.resolve(doc.type, doc.id);
expect(http.fetch.mock.calls[0]).toMatchInlineSnapshot(`
Array [
Expand All @@ -191,10 +194,12 @@ describe('SavedObjectsClient', () => {

test('batches several #resolve calls into a single HTTP call', async () => {
// Await #resolve call to ensure batchQueue is empty and throttle has reset
mockResolvedObjects({ ...doc, type: 'type2' });
await savedObjectsClient.resolve('type2', doc.id);
http.fetch.mockClear();

// Make two #resolve calls right after one another
mockResolvedObjects({ ...doc, type: 'type1' }, { ...doc, type: 'type0' });
savedObjectsClient.resolve('type1', doc.id);
await savedObjectsClient.resolve('type0', doc.id);
expect(http.fetch.mock.calls).toMatchInlineSnapshot(`
Expand All @@ -213,9 +218,11 @@ describe('SavedObjectsClient', () => {

test('removes duplicates when calling `_bulk_resolve`', async () => {
// Await #resolve call to ensure batchQueue is empty and throttle has reset
mockResolvedObjects({ ...doc, type: 'type2' });
await savedObjectsClient.resolve('type2', doc.id);
http.fetch.mockClear();

mockResolvedObjects(doc, { ...doc, type: 'some-type', id: 'some-id' }); // the client will only request two objects, so we only mock two results
savedObjectsClient.resolve(doc.type, doc.id);
savedObjectsClient.resolve('some-type', 'some-id');
await savedObjectsClient.resolve(doc.type, doc.id);
Expand All @@ -235,9 +242,11 @@ describe('SavedObjectsClient', () => {

test('resolves with correct object when there are duplicates present', async () => {
// Await #resolve call to ensure batchQueue is empty and throttle has reset
mockResolvedObjects({ ...doc, type: 'type2' });
await savedObjectsClient.resolve('type2', doc.id);
http.fetch.mockClear();

mockResolvedObjects(doc);
const call1 = savedObjectsClient.resolve(doc.type, doc.id);
const objFromCall2 = await savedObjectsClient.resolve(doc.type, doc.id);
const objFromCall1 = await call1;
Expand All @@ -252,8 +261,10 @@ describe('SavedObjectsClient', () => {
test('do not share instances or references between duplicate callers', async () => {
// Await #resolve call to ensure batchQueue is empty and throttle has reset
await savedObjectsClient.resolve('type2', doc.id);
mockResolvedObjects({ ...doc, type: 'type2' });
http.fetch.mockClear();

mockResolvedObjects(doc);
const call1 = savedObjectsClient.resolve(doc.type, doc.id);
const objFromCall2 = await savedObjectsClient.resolve(doc.type, doc.id);
const objFromCall1 = await call1;
Expand All @@ -263,6 +274,7 @@ describe('SavedObjectsClient', () => {
});

test('resolves with ResolvedSimpleSavedObject instance', async () => {
mockResolvedObjects(doc);
const result = await savedObjectsClient.resolve(doc.type, doc.id);
expect(result.saved_object).toBeInstanceOf(SimpleSavedObject);
expect(result.saved_object.type).toBe(doc.type);
Expand Down
56 changes: 31 additions & 25 deletions src/core/public/saved_objects/saved_objects_client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -152,9 +152,7 @@ interface ObjectTypeAndId {
type: string;
}

const getObjectsToFetch = (
queue: Array<BatchGetQueueEntry | BatchResolveQueueEntry>
): ObjectTypeAndId[] => {
const getObjectsToFetch = (queue: BatchGetQueueEntry[]): ObjectTypeAndId[] => {
const objects: ObjectTypeAndId[] = [];
const inserted = new Set<string>();
queue.forEach(({ id, type }) => {
Expand All @@ -166,6 +164,24 @@ const getObjectsToFetch = (
return objects;
};

/**
 * Deduplicates a batch-resolve queue into the minimal list of objects to request.
 *
 * @param queue queued `#resolve` calls (each entry carries the requested `type` and `id`)
 * @returns `objectsToResolve` — unique `{ type, id }` pairs, in first-seen order, to send to
 *   `_bulk_resolve`; and `responseIndices` — for each queue entry, the index **into the
 *   response array** (which mirrors `objectsToResolve`'s order) holding that entry's result.
 */
const getObjectsToResolve = (queue: BatchResolveQueueEntry[]) => {
  const responseIndices: number[] = [];
  const objectsToResolve: ObjectTypeAndId[] = [];
  // Maps `${type}|${id}` -> index within `objectsToResolve` (NOT the queue index):
  // `responseIndices` is used to index the deduplicated bulk-resolve response, so it
  // must reference positions in the deduplicated request array.
  const inserted = new Map<string, number>();
  queue.forEach(({ id, type }) => {
    const key = `${type}|${id}`;
    const indexForTypeAndId = inserted.get(key);
    if (indexForTypeAndId === undefined) {
      // First time we see this type/id: its result will appear at the position it
      // occupies in `objectsToResolve`.
      inserted.set(key, objectsToResolve.length);
      responseIndices.push(objectsToResolve.length);
      objectsToResolve.push({ id, type });
    } else {
      // Duplicate request: reuse the response slot of the first occurrence.
      responseIndices.push(indexForTypeAndId);
    }
  });
  return { objectsToResolve, responseIndices };
};

/**
 * Saved Objects is Kibana's data persistence mechanism allowing plugins to
* use Elasticsearch for storing plugin state. The client-side
Expand Down Expand Up @@ -225,28 +241,18 @@ export class SavedObjectsClient {
this.batchResolveQueue = [];

try {
const objectsToFetch = getObjectsToFetch(queue);
const { resolved_objects: savedObjects } = await this.performBulkResolve(objectsToFetch);

queue.forEach((queueItem) => {
const foundObject = savedObjects.find((resolveResponse) => {
return (
resolveResponse.saved_object.id === queueItem.id &&
resolveResponse.saved_object.type === queueItem.type
);
});

if (foundObject) {
// multiple calls may have requested the same object.
// we need to clone to avoid sharing references between the instances
queueItem.resolve(this.createResolvedSavedObject(cloneDeep(foundObject)));
} else {
queueItem.resolve(
this.createResolvedSavedObject({
saved_object: pick(queueItem, ['id', 'type']),
} as SavedObjectsResolveResponse)
);
}
const { objectsToResolve, responseIndices } = getObjectsToResolve(queue);
const { resolved_objects: resolvedObjects } = await this.performBulkResolve(
objectsToResolve
);

queue.forEach((queueItem, i) => {
// This differs from the older processBatchGetQueue approach because the resolved object IDs are *not* guaranteed to be the same.
// Instead, we rely on the guarantee that the objects in the bulkResolve response will be in the same order as the requests.
// However, we still need to clone the response object because we deduplicate batched requests.
const responseIndex = responseIndices[i];
const clone = cloneDeep(resolvedObjects[responseIndex]);
queueItem.resolve(this.createResolvedSavedObject(clone));
});
} catch (err) {
queue.forEach((queueItem) => {
Expand Down