Commit

Merge pull request #494 from vector-im/DanilaFe/backfill-changes
Unit tests for GapWriter, using a new timeline mock utility
bwindels authored Sep 23, 2021
2 parents 6c12f0f + 4b7cb6d commit 45917ea
Showing 8 changed files with 463 additions and 15 deletions.
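The diff for the new timeline mock itself (src/mocks/TimelineMock.ts, presumably the eighth changed file) is not expanded in this view. A minimal usage sketch, limited to the calls the added GapWriter tests below actually make; nothing else about the mock's API is implied:

// Assumed to run next to the tests below; TimelineMock, eventIds and eventId come from src/mocks/TimelineMock.ts.
const timelineMock = new TimelineMock();
timelineMock.append(30);                               // seed the mock timeline with 30 events
const syncResponse = timelineMock.sync(undefined, 10); // /sync-shaped response, limited to 10 events
const nextSync = timelineMock.sync(syncResponse.next_batch, 10); // continue from the previous batch token
// /messages-shaped pagination from a token; the direction string comes from
// fragmentEntry.direction.asApiString() in the tests ("b" for backwards in Matrix terms)
const messagesResponse = timelineMock.messages(syncResponse.next_batch, undefined, "b", 10);
timelineMock.insertAfter(eventId(9), 5);               // splice 5 more events in after event 9
// eventId(n) builds the id for event index n; eventIds(from, to) apparently builds the
// half-open range of ids [from, to) that the tests use as expected values.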
2 changes: 2 additions & 0 deletions src/logging/NullLogger.js
@@ -75,6 +75,8 @@ export class NullLogItem {

refDetached() {}

ensureRefId() {}

get level() {
return LogLevel;
}
173 changes: 173 additions & 0 deletions src/matrix/room/timeline/persistence/GapWriter.js
@@ -253,3 +253,176 @@ export class GapWriter {
return {entries, updatedEntries, fragments};
}
}

import {FragmentIdComparer} from "../FragmentIdComparer.js";
import {RelationWriter} from "./RelationWriter.js";
import {createMockStorage} from "../../../../mocks/Storage.js";
import {FragmentBoundaryEntry} from "../entries/FragmentBoundaryEntry.js";
import {NullLogItem} from "../../../../logging/NullLogger.js";
import {TimelineMock, eventIds, eventId} from "../../../../mocks/TimelineMock.ts";
import {SyncWriter} from "./SyncWriter.js";
import {MemberWriter} from "./MemberWriter.js";
import {KeyLimits} from "../../../storage/common";

export function tests() {
const roomId = "!room:hs.tdl";
const alice = "alice@hs.tdl";
const logger = new NullLogItem();

async function createGapFillTxn(storage) {
return storage.readWriteTxn([
storage.storeNames.roomMembers,
storage.storeNames.pendingEvents,
storage.storeNames.timelineEvents,
storage.storeNames.timelineRelations,
storage.storeNames.timelineFragments,
]);
}

async function setup() {
const storage = await createMockStorage();
const txn = await createGapFillTxn(storage);
const fragmentIdComparer = new FragmentIdComparer([]);
const relationWriter = new RelationWriter({
roomId, fragmentIdComparer, ownUserId: alice,
});
const gapWriter = new GapWriter({
roomId, storage, fragmentIdComparer, relationWriter
});
const memberWriter = new MemberWriter(roomId);
const syncWriter = new SyncWriter({
roomId,
fragmentIdComparer,
memberWriter,
relationWriter
});
return { storage, txn, fragmentIdComparer, gapWriter, syncWriter, timelineMock: new TimelineMock() };
}

async function syncAndWrite(mocks, { previous, limit } = {}) {
const {txn, timelineMock, syncWriter, fragmentIdComparer} = mocks;
const syncResponse = timelineMock.sync(previous?.next_batch, limit);
const {newLiveKey} = await syncWriter.writeSync(syncResponse, false, false, txn, logger);
syncWriter.afterSync(newLiveKey);
return {
syncResponse,
fragmentEntry: newLiveKey ? FragmentBoundaryEntry.start(
await txn.timelineFragments.get(roomId, newLiveKey.fragmentId),
fragmentIdComparer,
) : null,
};
}

async function backfillAndWrite(mocks, fragmentEntry, limit) {
const {txn, timelineMock, gapWriter} = mocks;
const messageResponse = timelineMock.messages(fragmentEntry.token, undefined, fragmentEntry.direction.asApiString(), limit);
await gapWriter.writeFragmentFill(fragmentEntry, messageResponse, txn, logger);
}

async function allFragmentEvents(mocks, fragmentId) {
const {txn} = mocks;
const entries = await txn.timelineEvents.eventsAfter(roomId, new EventKey(fragmentId, KeyLimits.minStorageKey));
return entries.map(e => e.event);
}

async function fetchFragment(mocks, fragmentId) {
const {txn} = mocks;
return txn.timelineFragments.get(roomId, fragmentId);
}

function assertFilledLink(assert, fragment1, fragment2) {
assert.equal(fragment1.nextId, fragment2.id);
assert.equal(fragment2.previousId, fragment1.id);
assert.equal(fragment1.nextToken, null);
assert.equal(fragment2.previousToken, null);
}

function assertGapLink(assert, fragment1, fragment2) {
assert.equal(fragment1.nextId, fragment2.id);
assert.equal(fragment2.previousId, fragment1.id);
assert.notEqual(fragment2.previousToken, null);
}

return {
"Backfilling after one sync": async assert => {
const mocks = await setup();
const { timelineMock } = mocks;
timelineMock.append(30);
const {fragmentEntry} = await syncAndWrite(mocks);
await backfillAndWrite(mocks, fragmentEntry, 10);
const events = await allFragmentEvents(mocks, fragmentEntry.fragmentId);
assert.deepEqual(events.map(e => e.event_id), eventIds(10, 30));
await mocks.txn.complete();
},
"Backfilling a fragment that is expected to close a gap, and does": async assert => {
const mocks = await setup();
const { timelineMock } = mocks;
timelineMock.append(10);
const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks, { limit: 10 });
timelineMock.append(15);
const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 });
await backfillAndWrite(mocks, secondFragmentEntry, 10);

const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId);
const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId);
assertFilledLink(assert, firstFragment, secondFragment);
const firstEvents = await allFragmentEvents(mocks, firstFragmentEntry.fragmentId);
assert.deepEqual(firstEvents.map(e => e.event_id), eventIds(0, 10));
const secondEvents = await allFragmentEvents(mocks, secondFragmentEntry.fragmentId);
assert.deepEqual(secondEvents.map(e => e.event_id), eventIds(10, 25));
await mocks.txn.complete();
},
"Backfilling a fragment that is expected to close a gap, but doesn't yet": async assert => {
const mocks = await setup();
const { timelineMock } = mocks;
timelineMock.append(10);
const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks, { limit: 10 });
timelineMock.append(20);
const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 });
await backfillAndWrite(mocks, secondFragmentEntry, 10);

const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId);
const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId);
assertGapLink(assert, firstFragment, secondFragment);
const firstEvents = await allFragmentEvents(mocks, firstFragmentEntry.fragmentId);
assert.deepEqual(firstEvents.map(e => e.event_id), eventIds(0, 10));
const secondEvents = await allFragmentEvents(mocks, secondFragmentEntry.fragmentId);
assert.deepEqual(secondEvents.map(e => e.event_id), eventIds(10, 30));
await mocks.txn.complete();
},
"Receiving a sync with the same events as the current fragment does not create infinite link": async assert => {
const mocks = await setup();
const { txn, timelineMock } = mocks;
timelineMock.append(10);
const {syncResponse, fragmentEntry} = await syncAndWrite(mocks, { limit: 10 });
// Mess with the saved token to receive old events in backfill
fragmentEntry.token = syncResponse.next_batch;
txn.timelineFragments.update(fragmentEntry.fragment);
await backfillAndWrite(mocks, fragmentEntry, 10);

const fragment = await fetchFragment(mocks, fragmentEntry.fragmentId);
assert.notEqual(fragment.nextId, fragment.id);
assert.notEqual(fragment.previousId, fragment.id);
await mocks.txn.complete();
},
"An event received by sync does not interrupt backfilling": async assert => {
const mocks = await setup();
const { timelineMock } = mocks;
timelineMock.append(10);
const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks, { limit: 10 });
timelineMock.append(11);
const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 });
timelineMock.insertAfter(eventId(9), 5);
await backfillAndWrite(mocks, secondFragmentEntry, 10);

const firstEvents = await allFragmentEvents(mocks, firstFragmentEntry.fragmentId);
assert.deepEqual(firstEvents.map(e => e.event_id), eventIds(0, 10));
const secondEvents = await allFragmentEvents(mocks, secondFragmentEntry.fragmentId);
assert.deepEqual(secondEvents.map(e => e.event_id), [...eventIds(21,26), ...eventIds(10, 21)]);
const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId);
const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId);
assertFilledLink(assert, firstFragment, secondFragment);
await mocks.txn.complete();
}
}
}
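The assertions above read fragment records straight from the timelineFragments store. A rough sketch of the fields they touch, for orientation only; the field names come from assertFilledLink and assertGapLink, and everything else about the fragment schema is a guess rather than something this diff shows:

// Hypothetical state of two fragment records once backfill has closed the gap between them,
// i.e. what assertFilledLink checks:
const firstFragment = {
    id: 0,
    previousId: null, previousToken: "<pagination token>", // still open towards older history
    nextId: 1, nextToken: null,                             // joined to the next fragment
};
const secondFragment = {
    id: 1,
    previousId: 0, previousToken: null, // links back to firstFragment, gap fully filled
    nextId: null, nextToken: null,      // live edge; token handling here is not shown in this diff
};
// assertGapLink covers the intermediate state: the ids already point at each other,
// but secondFragment.previousToken is still set because the gap has not been filled yet.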
17 changes: 14 additions & 3 deletions src/matrix/storage/idb/QueryTarget.ts
@@ -15,6 +15,7 @@ limitations under the License.
*/

import {iterateCursor, DONE, NOT_DONE, reqAsPromise} from "./utils";
import {Transaction} from "./Transaction";

type Reducer<A,B> = (acc: B, val: A) => B

@@ -31,9 +32,19 @@ interface QueryTargetInterface<T> {

export class QueryTarget<T> {
protected _target: QueryTargetInterface<T>;
protected _transaction: Transaction;

constructor(target: QueryTargetInterface<T>) {
constructor(target: QueryTargetInterface<T>, transaction: Transaction) {
this._target = target;
this._transaction = transaction;
}

get idbFactory(): IDBFactory {
return this._transaction.idbFactory;
}

get IDBKeyRange(): typeof IDBKeyRange {
return this._transaction.IDBKeyRange;
}

_openCursor(range?: IDBQuery, direction?: IDBCursorDirection): IDBRequest<IDBCursorWithValue | null> {
@@ -155,11 +166,11 @@ export class QueryTarget<T> {
*/
async findExistingKeys(keys: IDBValidKey[], backwards: boolean, callback: (key: IDBValidKey, found: boolean) => boolean): Promise<void> {
const direction = backwards ? "prev" : "next";
const compareKeys = (a, b) => backwards ? -indexedDB.cmp(a, b) : indexedDB.cmp(a, b);
const compareKeys = (a, b) => backwards ? -this.idbFactory.cmp(a, b) : this.idbFactory.cmp(a, b);
const sortedKeys = keys.slice().sort(compareKeys);
const firstKey = backwards ? sortedKeys[sortedKeys.length - 1] : sortedKeys[0];
const lastKey = backwards ? sortedKeys[0] : sortedKeys[sortedKeys.length - 1];
const cursor = this._target.openKeyCursor(IDBKeyRange.bound(firstKey, lastKey), direction);
const cursor = this._target.openKeyCursor(this.IDBKeyRange.bound(firstKey, lastKey), direction);
let i = 0;
let consumerDone = false;
await iterateCursor(cursor, (value, key) => {
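For reference, IDBFactory.cmp is the standard IndexedDB key comparison, returning -1, 0 or 1; switching findExistingKeys from the indexedDB global to this.idbFactory means the same ordering comes from whatever factory was injected (the browser's in the app, a fake one in tests). A small illustration against any spec-compliant factory; the idbFactory variable here is a placeholder:

// IndexedDB key ordering: numbers < dates < strings < binary < arrays
idbFactory.cmp(1, 2);         // -1
idbFactory.cmp("a", "a");     //  0
idbFactory.cmp([1, 2], [1]);  //  1 (an array sorts after its own prefix)
// Negating the result, as findExistingKeys does when iterating backwards, reverses the order.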
5 changes: 4 additions & 1 deletion src/matrix/storage/idb/Storage.ts
@@ -24,12 +24,15 @@ const WEBKITEARLYCLOSETXNBUG_BOGUS_KEY = "782rh281re38-boguskey";
export class Storage {
private _db: IDBDatabase;
private _hasWebkitEarlyCloseTxnBug: boolean;

readonly logger: BaseLogger;
readonly idbFactory: IDBFactory
readonly IDBKeyRange: typeof IDBKeyRange;
readonly storeNames: typeof StoreNames;

constructor(idbDatabase: IDBDatabase, _IDBKeyRange: typeof IDBKeyRange, hasWebkitEarlyCloseTxnBug: boolean, logger: BaseLogger) {
constructor(idbDatabase: IDBDatabase, idbFactory: IDBFactory, _IDBKeyRange: typeof IDBKeyRange, hasWebkitEarlyCloseTxnBug: boolean, logger: BaseLogger) {
this._db = idbDatabase;
this.idbFactory = idbFactory;
this.IDBKeyRange = _IDBKeyRange;
this._hasWebkitEarlyCloseTxnBug = hasWebkitEarlyCloseTxnBug;
this.storeNames = StoreNames;
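The added idbFactory parameter lets a caller hand Storage a non-global IndexedDB implementation, which is what the createMockStorage helper used by the GapWriter tests relies on. A minimal sketch of such wiring, assuming an in-memory shim like fake-indexeddb; the real helper lives in src/mocks/Storage.js and is not part of this excerpt:

// Sketch only; the import names from "fake-indexeddb" and the surrounding wiring are assumptions,
// not something this commit prescribes.
import {indexedDB as idbFactory, IDBKeyRange} from "fake-indexeddb";
import {Storage} from "../matrix/storage/idb/Storage";

function createInMemoryStorage(db, logger) {
    // db: an IDBDatabase opened through idbFactory; logger: any BaseLogger-compatible logger
    const hasWebkitEarlyCloseTxnBug = false; // the WebKit workaround is irrelevant for an in-memory shim
    return new Storage(db, idbFactory, IDBKeyRange, hasWebkitEarlyCloseTxnBug, logger);
}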
2 changes: 1 addition & 1 deletion src/matrix/storage/idb/StorageFactory.ts
@@ -71,7 +71,7 @@ export class StorageFactory {

const hasWebkitEarlyCloseTxnBug = await detectWebkitEarlyCloseTxnBug(this._idbFactory);
const db = await openDatabaseWithSessionId(sessionId, this._idbFactory, log);
return new Storage(db, this._IDBKeyRange, hasWebkitEarlyCloseTxnBug, log.logger);
return new Storage(db, this._idbFactory, this._IDBKeyRange, hasWebkitEarlyCloseTxnBug, log.logger);
}

delete(sessionId: string): Promise<IDBDatabase> {
12 changes: 2 additions & 10 deletions src/matrix/storage/idb/Store.ts
@@ -133,24 +133,16 @@ class QueryTargetWrapper<T> {
}

export class Store<T> extends QueryTarget<T> {
private _transaction: Transaction;

constructor(idbStore: IDBObjectStore, transaction: Transaction) {
super(new QueryTargetWrapper<T>(idbStore));
this._transaction = transaction;
}

get IDBKeyRange() {
// @ts-ignore
return this._transaction.IDBKeyRange;
super(new QueryTargetWrapper<T>(idbStore), transaction);
}

get _idbStore(): QueryTargetWrapper<T> {
return (this._target as QueryTargetWrapper<T>);
}

index(indexName: string): QueryTarget<T> {
return new QueryTarget<T>(new QueryTargetWrapper<T>(this._idbStore.index(indexName)));
return new QueryTarget<T>(new QueryTargetWrapper<T>(this._idbStore.index(indexName)), this._transaction);
}

put(value: T, log?: LogItem): void {
4 changes: 4 additions & 0 deletions src/matrix/storage/idb/Transaction.ts
@@ -65,6 +65,10 @@ export class Transaction {
this._writeErrors = [];
}

get idbFactory(): IDBFactory {
return this._storage.idbFactory;
}

get IDBKeyRange(): typeof IDBKeyRange {
return this._storage.IDBKeyRange;
}
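Taken together, the storage changes route the IndexedDB primitives through one delegation chain (Store/QueryTarget to Transaction to Storage) instead of reaching for the indexedDB and IDBKeyRange globals, which is what makes the mock-backed tests above possible. A compressed sketch of that chain, with placeholder variables for everything the caller injects:

// db, idbFactory, IDBKeyRange and logger stand in for whatever the caller provides.
const storage = new Storage(db, idbFactory, IDBKeyRange, false, logger);
const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents]); // as in the tests above
const store = txn.timelineEvents; // a Store, i.e. a QueryTarget bound to this transaction
// The getters added in this commit resolve through that chain rather than through globals:
//   store.idbFactory  === txn.idbFactory  === storage.idbFactory   (the injected IDBFactory)
//   store.IDBKeyRange === txn.IDBKeyRange === storage.IDBKeyRange  (the injected IDBKeyRange)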