diff --git a/src/core/public/saved_objects/saved_objects_client.test.ts b/src/core/public/saved_objects/saved_objects_client.test.ts index 0f37d10b3f32d..101f86b299ffc 100644 --- a/src/core/public/saved_objects/saved_objects_client.test.ts +++ b/src/core/public/saved_objects/saved_objects_client.test.ts @@ -148,13 +148,15 @@ describe('SavedObjectsClient', () => { }); describe('#resolve', () => { - beforeEach(() => { + function mockResolvedObjects(...objects: Array<Record<string, unknown>>) { http.fetch.mockResolvedValue({ - resolved_objects: [ - { saved_object: doc, outcome: 'conflict', alias_target_id: 'another-id' }, - ], + resolved_objects: objects.map((obj) => ({ + saved_object: obj, + outcome: 'conflict', + alias_target_id: 'another-id', + })), }); - }); + } test('rejects if `type` parameter is undefined', () => { return expect( @@ -176,6 +178,7 @@ }); test('makes HTTP call', async () => { + mockResolvedObjects(doc); await savedObjectsClient.resolve(doc.type, doc.id); expect(http.fetch.mock.calls[0]).toMatchInlineSnapshot(` Array [ @@ -191,10 +194,12 @@ test('batches several #resolve calls into a single HTTP call', async () => { // Await #resolve call to ensure batchQueue is empty and throttle has reset + mockResolvedObjects({ ...doc, type: 'type2' }); await savedObjectsClient.resolve('type2', doc.id); http.fetch.mockClear(); // Make two #resolve calls right after one another + mockResolvedObjects({ ...doc, type: 'type1' }, { ...doc, type: 'type0' }); savedObjectsClient.resolve('type1', doc.id); await savedObjectsClient.resolve('type0', doc.id); expect(http.fetch.mock.calls).toMatchInlineSnapshot(` @@ -213,9 +218,11 @@ test('removes duplicates when calling `_bulk_resolve`', async () => { // Await #resolve call to ensure batchQueue is empty and throttle has reset + mockResolvedObjects({ ...doc, type: 'type2' }); await savedObjectsClient.resolve('type2', doc.id); 
http.fetch.mockClear(); + mockResolvedObjects(doc, { ...doc, type: 'some-type', id: 'some-id' }); // the client will only request two objects, so we only mock two results savedObjectsClient.resolve(doc.type, doc.id); savedObjectsClient.resolve('some-type', 'some-id'); await savedObjectsClient.resolve(doc.type, doc.id); @@ -235,9 +242,11 @@ test('resolves with correct object when there are duplicates present', async () => { // Await #resolve call to ensure batchQueue is empty and throttle has reset + mockResolvedObjects({ ...doc, type: 'type2' }); await savedObjectsClient.resolve('type2', doc.id); http.fetch.mockClear(); + mockResolvedObjects(doc); const call1 = savedObjectsClient.resolve(doc.type, doc.id); const objFromCall2 = await savedObjectsClient.resolve(doc.type, doc.id); const objFromCall1 = await call1; @@ -252,8 +261,10 @@ test('do not share instances or references between duplicate callers', async () => { // Await #resolve call to ensure batchQueue is empty and throttle has reset + mockResolvedObjects({ ...doc, type: 'type2' }); await savedObjectsClient.resolve('type2', doc.id); http.fetch.mockClear(); + mockResolvedObjects(doc); const call1 = savedObjectsClient.resolve(doc.type, doc.id); const objFromCall2 = await savedObjectsClient.resolve(doc.type, doc.id); const objFromCall1 = await call1; @@ -263,6 +274,7 @@ }); test('resolves with ResolvedSimpleSavedObject instance', async () => { + mockResolvedObjects(doc); const result = await savedObjectsClient.resolve(doc.type, doc.id); expect(result.saved_object).toBeInstanceOf(SimpleSavedObject); expect(result.saved_object.type).toBe(doc.type); diff --git a/src/core/public/saved_objects/saved_objects_client.ts b/src/core/public/saved_objects/saved_objects_client.ts index cfea406ad662d..2bf1d47ec273c 100644 --- a/src/core/public/saved_objects/saved_objects_client.ts +++ 
b/src/core/public/saved_objects/saved_objects_client.ts @@ -152,9 +152,7 @@ interface ObjectTypeAndId { type: string; } -const getObjectsToFetch = ( - queue: Array<BatchGetQueueEntry | BatchResolveQueueEntry> -): ObjectTypeAndId[] => { +const getObjectsToFetch = (queue: BatchGetQueueEntry[]): ObjectTypeAndId[] => { const objects: ObjectTypeAndId[] = []; const inserted = new Set<string>(); queue.forEach(({ id, type }) => { @@ -166,6 +164,24 @@ return objects; }; +const getObjectsToResolve = (queue: BatchResolveQueueEntry[]) => { + const responseIndices: number[] = []; + const objectsToResolve: ObjectTypeAndId[] = []; + const inserted = new Map<string, number>(); + queue.forEach(({ id, type }, currentIndex) => { + const key = `${type}|${id}`; + const indexForTypeAndId = inserted.get(key); + if (indexForTypeAndId === undefined) { + inserted.set(key, currentIndex); + objectsToResolve.push({ id, type }); + responseIndices.push(currentIndex); + } else { + responseIndices.push(indexForTypeAndId); + } + }); + return { objectsToResolve, responseIndices }; +}; + /** * Saved Objects is Kibana's data persisentence mechanism allowing plugins to * use Elasticsearch for storing plugin state. The client-side @@ -225,28 +241,18 @@ export class SavedObjectsClient { this.batchResolveQueue = []; try { - const objectsToFetch = getObjectsToFetch(queue); - const { resolved_objects: savedObjects } = await this.performBulkResolve(objectsToFetch); - - queue.forEach((queueItem) => { - const foundObject = savedObjects.find((resolveResponse) => { - return ( - resolveResponse.saved_object.id === queueItem.id && - resolveResponse.saved_object.type === queueItem.type - ); - }); - - if (foundObject) { - // multiple calls may have been requested the same object. 
- // we need to clone to avoid sharing references between the instances - queueItem.resolve(this.createResolvedSavedObject(cloneDeep(foundObject))); - } else { - queueItem.resolve( - this.createResolvedSavedObject({ - saved_object: pick(queueItem, ['id', 'type']), - } as SavedObjectsResolveResponse) - ); - } + const { objectsToResolve, responseIndices } = getObjectsToResolve(queue); + const { resolved_objects: resolvedObjects } = await this.performBulkResolve( + objectsToResolve + ); + + queue.forEach((queueItem, i) => { + // This differs from the older processBatchGetQueue approach because the resolved object IDs are *not* guaranteed to be the same. + // Instead, we rely on the guarantee that the objects in the bulkResolve response will be in the same order as the requests. + // However, we still need to clone the response object because we deduplicate batched requests. + const responseIndex = responseIndices[i]; + const clone = cloneDeep(resolvedObjects[responseIndex]); + queueItem.resolve(this.createResolvedSavedObject(clone)); }); } catch (err) { queue.forEach((queueItem) => {