From 71694787cd63b4f6a94701e21008c29f96bd4d61 Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Mon, 13 Sep 2021 16:55:55 -0700 Subject: [PATCH 01/19] Add an IDBFactory mock parameter --- src/matrix/storage/idb/QueryTarget.ts | 6 ++++-- src/matrix/storage/idb/Storage.ts | 8 +++++--- src/matrix/storage/idb/StorageFactory.ts | 2 +- src/matrix/storage/idb/Store.ts | 6 +++--- src/matrix/storage/idb/Transaction.ts | 10 ++++++---- 5 files changed, 19 insertions(+), 13 deletions(-) diff --git a/src/matrix/storage/idb/QueryTarget.ts b/src/matrix/storage/idb/QueryTarget.ts index e3b77810..c8fc8df1 100644 --- a/src/matrix/storage/idb/QueryTarget.ts +++ b/src/matrix/storage/idb/QueryTarget.ts @@ -31,9 +31,11 @@ interface QueryTargetInterface { export class QueryTarget { protected _target: QueryTargetInterface; + protected _idbFactory: IDBFactory - constructor(target: QueryTargetInterface) { + constructor(target: QueryTargetInterface, idbFactory: IDBFactory) { this._target = target; + this._idbFactory = idbFactory; } _openCursor(range?: IDBQuery, direction?: IDBCursorDirection): IDBRequest { @@ -155,7 +157,7 @@ export class QueryTarget { */ async findExistingKeys(keys: IDBValidKey[], backwards: boolean, callback: (key: IDBValidKey, found: boolean) => boolean): Promise { const direction = backwards ? "prev" : "next"; - const compareKeys = (a, b) => backwards ? -indexedDB.cmp(a, b) : indexedDB.cmp(a, b); + const compareKeys = (a, b) => backwards ? -this._idbFactory.cmp(a, b) : this._idbFactory.cmp(a, b); const sortedKeys = keys.slice().sort(compareKeys); const firstKey = backwards ? sortedKeys[sortedKeys.length - 1] : sortedKeys[0]; const lastKey = backwards ? 
sortedKeys[0] : sortedKeys[sortedKeys.length - 1]; diff --git a/src/matrix/storage/idb/Storage.ts b/src/matrix/storage/idb/Storage.ts index bffdb642..7e02c111 100644 --- a/src/matrix/storage/idb/Storage.ts +++ b/src/matrix/storage/idb/Storage.ts @@ -23,11 +23,13 @@ const WEBKITEARLYCLOSETXNBUG_BOGUS_KEY = "782rh281re38-boguskey"; export class Storage { private _db: IDBDatabase; private _hasWebkitEarlyCloseTxnBug: boolean; + private _idbFactory: IDBFactory private _IDBKeyRange: typeof IDBKeyRange storeNames: typeof StoreNames; - constructor(idbDatabase: IDBDatabase, _IDBKeyRange: typeof IDBKeyRange, hasWebkitEarlyCloseTxnBug: boolean) { + constructor(idbDatabase: IDBDatabase, idbFactory: IDBFactory, _IDBKeyRange: typeof IDBKeyRange, hasWebkitEarlyCloseTxnBug: boolean) { this._db = idbDatabase; + this._idbFactory = idbFactory; this._IDBKeyRange = _IDBKeyRange; this._hasWebkitEarlyCloseTxnBug = hasWebkitEarlyCloseTxnBug; this.storeNames = StoreNames; @@ -49,7 +51,7 @@ export class Storage { if (this._hasWebkitEarlyCloseTxnBug) { await reqAsPromise(txn.objectStore(storeNames[0]).get(WEBKITEARLYCLOSETXNBUG_BOGUS_KEY)); } - return new Transaction(txn, storeNames, this._IDBKeyRange); + return new Transaction(txn, storeNames, this._idbFactory, this._IDBKeyRange); } catch(err) { throw new StorageError("readTxn failed", err); } @@ -64,7 +66,7 @@ export class Storage { if (this._hasWebkitEarlyCloseTxnBug) { await reqAsPromise(txn.objectStore(storeNames[0]).get(WEBKITEARLYCLOSETXNBUG_BOGUS_KEY)); } - return new Transaction(txn, storeNames, this._IDBKeyRange); + return new Transaction(txn, storeNames, this._idbFactory, this._IDBKeyRange); } catch(err) { throw new StorageError("readWriteTxn failed", err); } diff --git a/src/matrix/storage/idb/StorageFactory.ts b/src/matrix/storage/idb/StorageFactory.ts index 6db78c36..55e6f604 100644 --- a/src/matrix/storage/idb/StorageFactory.ts +++ b/src/matrix/storage/idb/StorageFactory.ts @@ -70,7 +70,7 @@ export class StorageFactory { const 
hasWebkitEarlyCloseTxnBug = await detectWebkitEarlyCloseTxnBug(this._idbFactory); const db = await openDatabaseWithSessionId(sessionId, this._idbFactory, log); - return new Storage(db, this._IDBKeyRange, hasWebkitEarlyCloseTxnBug); + return new Storage(db, this._idbFactory, this._IDBKeyRange, hasWebkitEarlyCloseTxnBug); } delete(sessionId: string): Promise { diff --git a/src/matrix/storage/idb/Store.ts b/src/matrix/storage/idb/Store.ts index e2da0707..6dafd90e 100644 --- a/src/matrix/storage/idb/Store.ts +++ b/src/matrix/storage/idb/Store.ts @@ -130,8 +130,8 @@ class QueryTargetWrapper { export class Store extends QueryTarget { private _transaction: Transaction; - constructor(idbStore: IDBObjectStore, transaction: Transaction) { - super(new QueryTargetWrapper(idbStore)); + constructor(idbStore: IDBObjectStore, transaction: Transaction, idbFactory: IDBFactory) { + super(new QueryTargetWrapper(idbStore), idbFactory); this._transaction = transaction; } @@ -145,7 +145,7 @@ export class Store extends QueryTarget { } index(indexName: string): QueryTarget { - return new QueryTarget(new QueryTargetWrapper(this._idbStore.index(indexName))); + return new QueryTarget(new QueryTargetWrapper(this._idbStore.index(indexName)), this._idbFactory); } put(value: T): void { diff --git a/src/matrix/storage/idb/Transaction.ts b/src/matrix/storage/idb/Transaction.ts index ea21b745..1eaf0caf 100644 --- a/src/matrix/storage/idb/Transaction.ts +++ b/src/matrix/storage/idb/Transaction.ts @@ -40,13 +40,15 @@ export class Transaction { private _txn: IDBTransaction; private _allowedStoreNames: StoreNames[]; private _stores: { [storeName in StoreNames]?: any }; + idbFactory: IDBFactory + IDBKeyRange: typeof IDBKeyRange - constructor(txn: IDBTransaction, allowedStoreNames: StoreNames[], IDBKeyRange) { + constructor(txn: IDBTransaction, allowedStoreNames: StoreNames[], _idbFactory: IDBFactory, _IDBKeyRange: typeof IDBKeyRange) { this._txn = txn; this._allowedStoreNames = allowedStoreNames; 
this._stores = {}; - // @ts-ignore - this.IDBKeyRange = IDBKeyRange; + this.idbFactory = _idbFactory; + this.IDBKeyRange = _IDBKeyRange; } _idbStore(name: StoreNames): Store { @@ -54,7 +56,7 @@ export class Transaction { // more specific error? this is a bug, so maybe not ... throw new StorageError(`Invalid store for transaction: ${name}, only ${this._allowedStoreNames.join(", ")} are allowed.`); } - return new Store(this._txn.objectStore(name), this); + return new Store(this._txn.objectStore(name), this, this.idbFactory); } _store(name: StoreNames, mapStore: (idbStore: Store) => T): T { From 713f675f3a4e63520080dcc535b43def72958473 Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Mon, 13 Sep 2021 17:00:49 -0700 Subject: [PATCH 02/19] Mock IDBKeyRange, too --- src/matrix/storage/idb/QueryTarget.ts | 6 ++++-- src/matrix/storage/idb/Store.ts | 6 +++--- src/matrix/storage/idb/Transaction.ts | 2 +- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/src/matrix/storage/idb/QueryTarget.ts b/src/matrix/storage/idb/QueryTarget.ts index c8fc8df1..8ed702da 100644 --- a/src/matrix/storage/idb/QueryTarget.ts +++ b/src/matrix/storage/idb/QueryTarget.ts @@ -32,10 +32,12 @@ interface QueryTargetInterface { export class QueryTarget { protected _target: QueryTargetInterface; protected _idbFactory: IDBFactory + protected _IDBKeyRange: typeof IDBKeyRange - constructor(target: QueryTargetInterface, idbFactory: IDBFactory) { + constructor(target: QueryTargetInterface, idbFactory: IDBFactory, _IDBKeyRange: typeof IDBKeyRange) { this._target = target; this._idbFactory = idbFactory; + this._IDBKeyRange = _IDBKeyRange; } _openCursor(range?: IDBQuery, direction?: IDBCursorDirection): IDBRequest { @@ -161,7 +163,7 @@ export class QueryTarget { const sortedKeys = keys.slice().sort(compareKeys); const firstKey = backwards ? sortedKeys[sortedKeys.length - 1] : sortedKeys[0]; const lastKey = backwards ? 
sortedKeys[0] : sortedKeys[sortedKeys.length - 1]; - const cursor = this._target.openKeyCursor(IDBKeyRange.bound(firstKey, lastKey), direction); + const cursor = this._target.openKeyCursor(this._IDBKeyRange.bound(firstKey, lastKey), direction); let i = 0; let consumerDone = false; await iterateCursor(cursor, (value, key) => { diff --git a/src/matrix/storage/idb/Store.ts b/src/matrix/storage/idb/Store.ts index 6dafd90e..1093e73d 100644 --- a/src/matrix/storage/idb/Store.ts +++ b/src/matrix/storage/idb/Store.ts @@ -130,8 +130,8 @@ class QueryTargetWrapper { export class Store extends QueryTarget { private _transaction: Transaction; - constructor(idbStore: IDBObjectStore, transaction: Transaction, idbFactory: IDBFactory) { - super(new QueryTargetWrapper(idbStore), idbFactory); + constructor(idbStore: IDBObjectStore, transaction: Transaction) { + super(new QueryTargetWrapper(idbStore), transaction.idbFactory, transaction.IDBKeyRange); this._transaction = transaction; } @@ -145,7 +145,7 @@ export class Store extends QueryTarget { } index(indexName: string): QueryTarget { - return new QueryTarget(new QueryTargetWrapper(this._idbStore.index(indexName)), this._idbFactory); + return new QueryTarget(new QueryTargetWrapper(this._idbStore.index(indexName)), this._idbFactory, this._IDBKeyRange); } put(value: T): void { diff --git a/src/matrix/storage/idb/Transaction.ts b/src/matrix/storage/idb/Transaction.ts index 1eaf0caf..d0a8748f 100644 --- a/src/matrix/storage/idb/Transaction.ts +++ b/src/matrix/storage/idb/Transaction.ts @@ -56,7 +56,7 @@ export class Transaction { // more specific error? this is a bug, so maybe not ... 
throw new StorageError(`Invalid store for transaction: ${name}, only ${this._allowedStoreNames.join(", ")} are allowed.`); } - return new Store(this._txn.objectStore(name), this, this.idbFactory); + return new Store(this._txn.objectStore(name), this); } _store(name: StoreNames, mapStore: (idbStore: Store) => T): T { From b3df37b0bc990ef2b9888248a16418c33ab0d1bc Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Mon, 13 Sep 2021 17:01:32 -0700 Subject: [PATCH 03/19] Add the beginning of a tests function for GapWriter --- .../room/timeline/persistence/GapWriter.js | 87 +++++++++++++++++++ 1 file changed, 87 insertions(+) diff --git a/src/matrix/room/timeline/persistence/GapWriter.js b/src/matrix/room/timeline/persistence/GapWriter.js index e133d713..223c5b97 100644 --- a/src/matrix/room/timeline/persistence/GapWriter.js +++ b/src/matrix/room/timeline/persistence/GapWriter.js @@ -252,3 +252,90 @@ export class GapWriter { return {entries, updatedEntries, fragments}; } } + +import {FragmentIdComparer} from "../FragmentIdComparer.js"; +import {RelationWriter} from "./RelationWriter.js"; +import {createMockStorage} from "../../../../mocks/Storage.js"; +import {FragmentBoundaryEntry} from "../entries/FragmentBoundaryEntry.js"; +import {createEvent, withTextBody, withContent, withSender} from "../../../../mocks/event.js"; + +export function tests() { + const alice = "alice@hs.tdl"; + const bob = "bob@hs.tdl"; + const roomId = "!room:hs.tdl"; + const startToken = "begin_token"; + const endToken = "end_token"; + + class EventCreator { + constructor() { + this.counter = 0; + } + + nextEvent() { + const event = withTextBody(`This is event ${this.counter}`, withSender(bob, createEvent("m.room.message", `!event${this.counter}`))); + this.counter++; + return event; + } + + nextEvents(n) { + const events = []; + for (let i = 0; i < n; i++) { + events.push(this.nextEvent()); + } + return events; + } + + createMessagesResponse() { + return { + start: startToken, + end: endToken, + 
chunk: this.nextEvents(5), + state: [] + } + } + } + + async function createGapFillTxn(storage) { + return storage.readWriteTxn([ + storage.storeNames.pendingEvents, + storage.storeNames.timelineEvents, + storage.storeNames.timelineRelations, + storage.storeNames.timelineFragments, + ]); + } + + async function setup() { + const storage = await createMockStorage(); + const txn = await createGapFillTxn(storage); + const fragmentIdComparer = new FragmentIdComparer([]); + const relationWriter = new RelationWriter({ + roomId, fragmentIdComparer, ownUserId: alice, + }); + const gapWriter = new GapWriter({ + roomId, storage, fragmentIdComparer, relationWriter + }); + return { storage, txn, fragmentIdComparer, gapWriter, eventCreator: new EventCreator() }; + } + + async function createFragment(id, txn, fragmentIdComparer, overrides = {}) { + const newFragment = Object.assign({ + roomId, id, + previousId: null, + nextId: null, + nextToken: null, + previousToken: null + }, overrides); + await txn.timelineFragments.add(newFragment); + fragmentIdComparer.add(newFragment); + return newFragment; + } + + return { + "Backfilling an empty fragment": async assert => { + const { storage, txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); + const emptyFragment = await createFragment(0, txn, fragmentIdComparer, { previousToken: startToken }); + const newEntry = FragmentBoundaryEntry.start(emptyFragment, fragmentIdComparer); + await gapWriter.writeFragmentFill(newEntry, eventCreator.createMessagesResponse(), txn, null); + } + } +} From 31577cd4963178e722374248d1b2db4e48c6bd9e Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Tue, 14 Sep 2021 10:24:18 -0700 Subject: [PATCH 04/19] Draft first two tests --- .../room/timeline/persistence/GapWriter.js | 43 ++++++++++++++++++- 1 file changed, 41 insertions(+), 2 deletions(-) diff --git a/src/matrix/room/timeline/persistence/GapWriter.js b/src/matrix/room/timeline/persistence/GapWriter.js index 223c5b97..a7d85366 100644 --- 
a/src/matrix/room/timeline/persistence/GapWriter.js +++ b/src/matrix/room/timeline/persistence/GapWriter.js @@ -332,10 +332,49 @@ export function tests() { return { "Backfilling an empty fragment": async assert => { - const { storage, txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); + const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); const emptyFragment = await createFragment(0, txn, fragmentIdComparer, { previousToken: startToken }); const newEntry = FragmentBoundaryEntry.start(emptyFragment, fragmentIdComparer); - await gapWriter.writeFragmentFill(newEntry, eventCreator.createMessagesResponse(), txn, null); + + const response = eventCreator.createMessagesResponse(); + await gapWriter.writeFragmentFill(newEntry, response, txn, null); + + const allEvents = await txn.timelineEvents.eventsAfter(roomId, EventKey.minKey, 100 /* fetch all */); + for (let i = 0; i < response.chunk.length; i++) { + const responseEvent = response.chunk[response.chunk.length - 1 - i]; + const storedEvent = allEvents[i]; + assert.deepEqual(responseEvent, storedEvent.event); + } + await txn.complete(); + }, + "Backfilling a fragment with existing entries": async assert => { + const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); + const emptyFragment = await createFragment(0, txn, fragmentIdComparer, { previousToken: startToken }); + const newEntry = FragmentBoundaryEntry.start(emptyFragment, fragmentIdComparer); + + let initialKey = EventKey.defaultFragmentKey(0); + const initialEntries = eventCreator.nextEvents(10); + initialEntries.forEach(e => { + txn.timelineEvents.insert(createEventEntry(initialKey, roomId, e)) + initialKey = initialKey.nextKey(); + }); + + const response = eventCreator.createMessagesResponse(); + await gapWriter.writeFragmentFill(newEntry, response, txn, null); + + const allEvents = await txn.timelineEvents.eventsAfter(roomId, EventKey.minKey, 100 /* fetch all */); + let i = 0; + for (; i < 
response.chunk.length; i++) { + const responseEvent = response.chunk[response.chunk.length - 1 - i]; + const storedEvent = allEvents[i]; + assert.deepEqual(responseEvent, storedEvent.event); + } + for (const initialEntry of initialEntries) { + const storedEvent = allEvents[i++]; + assert.deepEqual(initialEntry, storedEvent.event); + } + + await txn.complete() } } } From 41e568f7831aa7f94254131067423e7e97a00d2f Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Tue, 14 Sep 2021 11:15:13 -0700 Subject: [PATCH 05/19] Add more tests and extract common test code --- .../room/timeline/persistence/GapWriter.js | 82 ++++++++++++++++--- 1 file changed, 72 insertions(+), 10 deletions(-) diff --git a/src/matrix/room/timeline/persistence/GapWriter.js b/src/matrix/room/timeline/persistence/GapWriter.js index a7d85366..54476ad4 100644 --- a/src/matrix/room/timeline/persistence/GapWriter.js +++ b/src/matrix/room/timeline/persistence/GapWriter.js @@ -330,6 +330,25 @@ export function tests() { return newFragment; } + function prefillFragment(txn, eventCreator, fragment, n) { + let initialKey = EventKey.defaultFragmentKey(fragment.id); + const initialEntries = eventCreator.nextEvents(n); + initialEntries.forEach(e => { + txn.timelineEvents.insert(createEventEntry(initialKey, roomId, e)) + initialKey = initialKey.nextKey(); + }); + return initialEntries; + } + + async function assertTightLink(assert, txn, fragmentId1, fragmentId2) { + const fragment1 = await txn.timelineFragments.get(roomId, fragmentId1); + const fragment2 = await txn.timelineFragments.get(roomId, fragmentId2); + assert.equal(fragment1.nextId, fragment2.id); + assert.equal(fragment2.previousId, fragment1.id); + assert.equal(fragment2.previousToken, null); + assert.equal(fragment2.nextToken, null); + } + return { "Backfilling an empty fragment": async assert => { const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); @@ -341,7 +360,7 @@ export function tests() { const allEvents = await 
txn.timelineEvents.eventsAfter(roomId, EventKey.minKey, 100 /* fetch all */); for (let i = 0; i < response.chunk.length; i++) { - const responseEvent = response.chunk[response.chunk.length - 1 - i]; + const responseEvent = response.chunk.at(-i - 1); const storedEvent = allEvents[i]; assert.deepEqual(responseEvent, storedEvent.event); } @@ -349,15 +368,10 @@ export function tests() { }, "Backfilling a fragment with existing entries": async assert => { const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); - const emptyFragment = await createFragment(0, txn, fragmentIdComparer, { previousToken: startToken }); - const newEntry = FragmentBoundaryEntry.start(emptyFragment, fragmentIdComparer); + const liveFragment = await createFragment(0, txn, fragmentIdComparer, { previousToken: startToken }); + const newEntry = FragmentBoundaryEntry.start(liveFragment, fragmentIdComparer); - let initialKey = EventKey.defaultFragmentKey(0); - const initialEntries = eventCreator.nextEvents(10); - initialEntries.forEach(e => { - txn.timelineEvents.insert(createEventEntry(initialKey, roomId, e)) - initialKey = initialKey.nextKey(); - }); + const initialEntries = await prefillFragment(txn, eventCreator, liveFragment, 10); const response = eventCreator.createMessagesResponse(); await gapWriter.writeFragmentFill(newEntry, response, txn, null); @@ -365,7 +379,7 @@ export function tests() { const allEvents = await txn.timelineEvents.eventsAfter(roomId, EventKey.minKey, 100 /* fetch all */); let i = 0; for (; i < response.chunk.length; i++) { - const responseEvent = response.chunk[response.chunk.length - 1 - i]; + const responseEvent = response.chunk.at(-i - 1); const storedEvent = allEvents[i]; assert.deepEqual(responseEvent, storedEvent.event); } @@ -375,6 +389,54 @@ export function tests() { } await txn.complete() + }, + "Backfilling a fragment that is expected to link up": async assert => { + const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); + 
const existingFragment = await createFragment(0, txn, fragmentIdComparer, { nextId: 1, nextToken: startToken }); + const liveFragment = await createFragment(1, txn, fragmentIdComparer, { previousId: 0, previousToken: startToken }); + const newEntry = FragmentBoundaryEntry.start(liveFragment, fragmentIdComparer); + + const initialEntries = await prefillFragment(txn, eventCreator, existingFragment, 10); + const response = eventCreator.createMessagesResponse(); + response.chunk.push(initialEntries.at(-1)); /* Expect overlap */ + await gapWriter.writeFragmentFill(newEntry, response, txn, null); + + const allEvents = await txn.timelineEvents._timelineStore.selectAll(); + let i = 0; + for (const initialEntry of initialEntries) { + const storedEvent = allEvents[i++]; + assert.deepEqual(initialEntry, storedEvent.event); + } + for (let j = 0; j < response.chunk.length - 1; j++) { + const responseEvent = response.chunk.at(-j - 2); + const storedEvent = allEvents[i + j]; + assert.deepEqual(responseEvent, storedEvent.event); + } + await assertTightLink(assert, txn, 0, 1); + }, + "Backfilling a fragment that is not expected to link up": async assert => { + const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); + const existingFragment = await createFragment(0, txn, fragmentIdComparer, { nextToken: startToken }); + const liveFragment = await createFragment(1, txn, fragmentIdComparer, { previousToken: startToken }); + const newEntry = FragmentBoundaryEntry.start(liveFragment, fragmentIdComparer); + + const initialEntries = await prefillFragment(txn, eventCreator, existingFragment, 10); + const response = eventCreator.createMessagesResponse(); + response.chunk.push(initialEntries.at(-1)); /* Fake overlap */ + await gapWriter.writeFragmentFill(newEntry, response, txn, null); + + const allEvents = await txn.timelineEvents._timelineStore.selectAll(); + let i = 0; + for (const initialEntry of initialEntries) { + const storedEvent = allEvents[i++]; + 
assert.deepEqual(initialEntry, storedEvent.event); + } + for (let j = 0; j < response.chunk.length - 1; j++) { + const responseEvent = response.chunk.at(-j - 2); + const storedEvent = allEvents[i + j]; + assert.deepEqual(responseEvent, storedEvent.event); + } + await assertTightLink(assert, txn, 0, 1); } } } From f8117b6f986d662fc9961b84874aec861780beae Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Tue, 14 Sep 2021 11:18:24 -0700 Subject: [PATCH 06/19] Lift transaction property to QueryTarget --- src/matrix/storage/idb/QueryTarget.ts | 21 ++++++++++++++------- src/matrix/storage/idb/Store.ts | 12 ++---------- 2 files changed, 16 insertions(+), 17 deletions(-) diff --git a/src/matrix/storage/idb/QueryTarget.ts b/src/matrix/storage/idb/QueryTarget.ts index 8ed702da..3ab33a6b 100644 --- a/src/matrix/storage/idb/QueryTarget.ts +++ b/src/matrix/storage/idb/QueryTarget.ts @@ -15,6 +15,7 @@ limitations under the License. */ import {iterateCursor, DONE, NOT_DONE, reqAsPromise} from "./utils"; +import {Transaction} from "./Transaction"; type Reducer = (acc: B, val: A) => B @@ -31,13 +32,19 @@ interface QueryTargetInterface { export class QueryTarget { protected _target: QueryTargetInterface; - protected _idbFactory: IDBFactory - protected _IDBKeyRange: typeof IDBKeyRange + protected _transaction: Transaction; - constructor(target: QueryTargetInterface, idbFactory: IDBFactory, _IDBKeyRange: typeof IDBKeyRange) { + constructor(target: QueryTargetInterface, transaction: Transaction) { this._target = target; - this._idbFactory = idbFactory; - this._IDBKeyRange = _IDBKeyRange; + this._transaction = transaction; + } + + get idbFactory(): IDBFactory { + return this._transaction.idbFactory; + } + + get IDBKeyRange(): typeof IDBKeyRange { + return this._transaction.IDBKeyRange; } _openCursor(range?: IDBQuery, direction?: IDBCursorDirection): IDBRequest { @@ -159,11 +166,11 @@ export class QueryTarget { */ async findExistingKeys(keys: IDBValidKey[], backwards: boolean, 
callback: (key: IDBValidKey, found: boolean) => boolean): Promise { const direction = backwards ? "prev" : "next"; - const compareKeys = (a, b) => backwards ? -this._idbFactory.cmp(a, b) : this._idbFactory.cmp(a, b); + const compareKeys = (a, b) => backwards ? -this.idbFactory.cmp(a, b) : this.idbFactory.cmp(a, b); const sortedKeys = keys.slice().sort(compareKeys); const firstKey = backwards ? sortedKeys[sortedKeys.length - 1] : sortedKeys[0]; const lastKey = backwards ? sortedKeys[0] : sortedKeys[sortedKeys.length - 1]; - const cursor = this._target.openKeyCursor(this._IDBKeyRange.bound(firstKey, lastKey), direction); + const cursor = this._target.openKeyCursor(this.IDBKeyRange.bound(firstKey, lastKey), direction); let i = 0; let consumerDone = false; await iterateCursor(cursor, (value, key) => { diff --git a/src/matrix/storage/idb/Store.ts b/src/matrix/storage/idb/Store.ts index 1093e73d..0930888c 100644 --- a/src/matrix/storage/idb/Store.ts +++ b/src/matrix/storage/idb/Store.ts @@ -128,16 +128,8 @@ class QueryTargetWrapper { } export class Store extends QueryTarget { - private _transaction: Transaction; - constructor(idbStore: IDBObjectStore, transaction: Transaction) { - super(new QueryTargetWrapper(idbStore), transaction.idbFactory, transaction.IDBKeyRange); - this._transaction = transaction; - } - - get IDBKeyRange() { - // @ts-ignore - return this._transaction.IDBKeyRange; + super(new QueryTargetWrapper(idbStore), transaction); } get _idbStore(): QueryTargetWrapper { @@ -145,7 +137,7 @@ export class Store extends QueryTarget { } index(indexName: string): QueryTarget { - return new QueryTarget(new QueryTargetWrapper(this._idbStore.index(indexName)), this._idbFactory, this._IDBKeyRange); + return new QueryTarget(new QueryTargetWrapper(this._idbStore.index(indexName)), this._transaction); } put(value: T): void { From b2b5690739bd216f3c90f1c4ee5e47bcc98b56e5 Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Tue, 14 Sep 2021 13:54:14 -0700 Subject: [PATCH 
07/19] Add more tests --- .../room/timeline/persistence/GapWriter.js | 83 ++++++++++++++++++- 1 file changed, 80 insertions(+), 3 deletions(-) diff --git a/src/matrix/room/timeline/persistence/GapWriter.js b/src/matrix/room/timeline/persistence/GapWriter.js index 54476ad4..1a40f23e 100644 --- a/src/matrix/room/timeline/persistence/GapWriter.js +++ b/src/matrix/room/timeline/persistence/GapWriter.js @@ -258,6 +258,7 @@ import {RelationWriter} from "./RelationWriter.js"; import {createMockStorage} from "../../../../mocks/Storage.js"; import {FragmentBoundaryEntry} from "../entries/FragmentBoundaryEntry.js"; import {createEvent, withTextBody, withContent, withSender} from "../../../../mocks/event.js"; +import {NullLogger} from "../../../../logging/NullLogger.js"; export function tests() { const alice = "alice@hs.tdl"; @@ -346,7 +347,16 @@ export function tests() { assert.equal(fragment1.nextId, fragment2.id); assert.equal(fragment2.previousId, fragment1.id); assert.equal(fragment2.previousToken, null); - assert.equal(fragment2.nextToken, null); + assert.equal(fragment1.nextToken, null); + } + + async function assertWeakLink(assert, txn, fragmentId1, fragmentId2) { + const fragment1 = await txn.timelineFragments.get(roomId, fragmentId1); + const fragment2 = await txn.timelineFragments.get(roomId, fragmentId2); + assert.equal(fragment1.nextId, fragment2.id); + assert.equal(fragment2.previousId, fragment1.id); + assert.notEqual(fragment2.previousToken, null); + assert.notEqual(fragment1.nextToken, null); } return { @@ -390,7 +400,7 @@ export function tests() { await txn.complete() }, - "Backfilling a fragment that is expected to link up": async assert => { + "Backfilling a fragment that is expected to link up, and does": async assert => { const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); const existingFragment = await createFragment(0, txn, fragmentIdComparer, { nextId: 1, nextToken: startToken }); const liveFragment = await createFragment(1, 
txn, fragmentIdComparer, { previousId: 0, previousToken: startToken }); @@ -414,6 +424,29 @@ export function tests() { } await assertTightLink(assert, txn, 0, 1); }, + "Backfilling a fragment that is expected to link up, but doesn't yet": async assert => { + const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); + const existingFragment = await createFragment(0, txn, fragmentIdComparer, { nextId: 1, nextToken: endToken }); + const liveFragment = await createFragment(1, txn, fragmentIdComparer, { previousId: 0, previousToken: startToken }); + const newEntry = FragmentBoundaryEntry.start(liveFragment, fragmentIdComparer); + + const initialEntries = await prefillFragment(txn, eventCreator, existingFragment, 10); + const response = eventCreator.createMessagesResponse(); + await gapWriter.writeFragmentFill(newEntry, response, txn, null); + + const allEvents = await txn.timelineEvents._timelineStore.selectAll(); + let i = 0; + for (const initialEntry of initialEntries) { + const storedEvent = allEvents[i++]; + assert.deepEqual(initialEntry, storedEvent.event); + } + for (let j = 0; j < response.chunk.length - 1; j++) { + const responseEvent = response.chunk.at(-j - 1); + const storedEvent = allEvents[i + j]; + assert.deepEqual(responseEvent, storedEvent.event); + } + await assertWeakLink(assert, txn, 0, 1); + }, "Backfilling a fragment that is not expected to link up": async assert => { const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); const existingFragment = await createFragment(0, txn, fragmentIdComparer, { nextToken: startToken }); @@ -437,6 +470,50 @@ export function tests() { assert.deepEqual(responseEvent, storedEvent.event); } await assertTightLink(assert, txn, 0, 1); - } + }, + "Receiving a sync with the same events as the current fragment does not create infinite link": async assert => { + const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); + const liveFragment = await createFragment(0, txn, 
fragmentIdComparer, { previousToken: startToken }); + const newEntry = FragmentBoundaryEntry.start(liveFragment, fragmentIdComparer); + + const initialEntries = await prefillFragment(txn, eventCreator, liveFragment, 10); + const response = { start: startToken, end: endToken, chunk: initialEntries.slice().reverse(), state: [] }; + await gapWriter.writeFragmentFill(newEntry, response, txn, new NullLogger()); + + const updatedLiveFragment = txn.timelineFragments.get(roomId, 0); + assert.equal(updatedLiveFragment.previousId, null); + const allEvents = await txn.timelineEvents._timelineStore.selectAll(); + let i = 0; + for (const initialEntry of initialEntries) { + assert.deepEqual(allEvents[i++].event, initialEntry); + } + assert.equal(allEvents.length, 10); + }, + "An event received by sync does not interrupt backfilling": async assert => { + const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); + const existingFragment = await createFragment(0, txn, fragmentIdComparer, { nextId: 1, nextToken: endToken }); + const liveFragment = await createFragment(1, txn, fragmentIdComparer, { previousId: 0, previousToken: startToken }); + const anotherFragment = await createFragment(2, txn, fragmentIdComparer); + const newEntry = FragmentBoundaryEntry.start(liveFragment, fragmentIdComparer); + + const initialEntries = await prefillFragment(txn, eventCreator, existingFragment, 10); + const [strayEntry] = await prefillFragment(txn, eventCreator, anotherFragment, 1); + const response = eventCreator.createMessagesResponse(); + const originalEntries = response.chunk.slice(); + response.chunk.splice(response.chunk.length - 3, 0, initialEntries[5], strayEntry); + await gapWriter.writeFragmentFill(newEntry, response, txn, null); + + const allEvents = await txn.timelineEvents._timelineStore.selectAll(); + let i = 0; + for (const initialEntry of initialEntries) { + const storedEvent = allEvents[i++]; + assert.deepEqual(initialEntry, storedEvent.event); + } + for (const 
originalEntry of originalEntries.reverse()) { + const storedEvent = allEvents[i++]; + assert.deepEqual(originalEntry, storedEvent.event); + } + await assertWeakLink(assert, txn, 0, 1); + }, } } From df273c5e2c2846ea5ec0125af0c1239deafea031 Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Tue, 14 Sep 2021 15:40:15 -0700 Subject: [PATCH 08/19] Store more events from backfill --- .../room/timeline/persistence/GapWriter.js | 30 ++++++++++--------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/src/matrix/room/timeline/persistence/GapWriter.js b/src/matrix/room/timeline/persistence/GapWriter.js index 1a40f23e..f5108f3c 100644 --- a/src/matrix/room/timeline/persistence/GapWriter.js +++ b/src/matrix/room/timeline/persistence/GapWriter.js @@ -47,28 +47,30 @@ export class GapWriter { } nonOverlappingEvents.push(...remainingEvents.slice(0, duplicateEventIndex)); if (!expectedOverlappingEventId || duplicateEventId === expectedOverlappingEventId) { - // TODO: check here that the neighbourEvent is at the correct edge of it's fragment - // get neighbour fragment to link it up later on - const neighbourEvent = await txn.timelineEvents.getByEventId(this._roomId, duplicateEventId); - if (neighbourEvent.fragmentId === fragmentEntry.fragmentId) { - log.log("hit #160, prevent fragment linking to itself", log.level.Warn); - } else { + // Only link fragment if this is the first overlapping fragment we discover. + // TODO is this sufficient? Might we get "out of order" fragments from events? 
+ if (!neighbourFragmentEntry) { + // TODO: check here that the neighbourEvent is at the correct edge of it's fragment + // get neighbour fragment to link it up later on + const neighbourEvent = await txn.timelineEvents.getByEventId(this._roomId, duplicateEventId); const neighbourFragment = await txn.timelineFragments.get(this._roomId, neighbourEvent.fragmentId); neighbourFragmentEntry = fragmentEntry.createNeighbourEntry(neighbourFragment); } - // trim overlapping events - remainingEvents = null; - } else { - // we've hit https://github.com/matrix-org/synapse/issues/7164, - // e.g. the event id we found is already in our store but it is not - // the adjacent fragment id. Ignore the event, but keep processing the ones after. - remainingEvents = remainingEvents.slice(duplicateEventIndex + 1); - } + } + // If more events remain, or if this wasn't the expected overlapping event, + // we've hit https://github.com/matrix-org/synapse/issues/7164, + // e.g. the event id we found is already in our store but it is not + // the adjacent fragment id. Ignore the event, but keep processing the ones after. + remainingEvents = remainingEvents.slice(duplicateEventIndex + 1); } else { nonOverlappingEvents.push(...remainingEvents); remainingEvents = null; } } + if (neighbourFragmentEntry?.fragmentId === fragmentEntry.fragmentId) { + log.log("hit #160, prevent fragment linking to itself", log.level.Warn); + neighbourFragmentEntry = null; + } return {nonOverlappingEvents, neighbourFragmentEntry}; } From d2b604e1dd1d343e6ebb890af5a22f33efc5a8ef Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Tue, 14 Sep 2021 15:57:19 -0700 Subject: [PATCH 09/19] Stop using `at` to fix tests. 
--- src/matrix/room/timeline/persistence/GapWriter.js | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/matrix/room/timeline/persistence/GapWriter.js b/src/matrix/room/timeline/persistence/GapWriter.js index f5108f3c..16412a95 100644 --- a/src/matrix/room/timeline/persistence/GapWriter.js +++ b/src/matrix/room/timeline/persistence/GapWriter.js @@ -372,7 +372,7 @@ export function tests() { const allEvents = await txn.timelineEvents.eventsAfter(roomId, EventKey.minKey, 100 /* fetch all */); for (let i = 0; i < response.chunk.length; i++) { - const responseEvent = response.chunk.at(-i - 1); + const responseEvent = response.chunk[response.chunk.length -i - 1]; const storedEvent = allEvents[i]; assert.deepEqual(responseEvent, storedEvent.event); } @@ -391,7 +391,7 @@ export function tests() { const allEvents = await txn.timelineEvents.eventsAfter(roomId, EventKey.minKey, 100 /* fetch all */); let i = 0; for (; i < response.chunk.length; i++) { - const responseEvent = response.chunk.at(-i - 1); + const responseEvent = response.chunk[response.chunk.length -i - 1]; const storedEvent = allEvents[i]; assert.deepEqual(responseEvent, storedEvent.event); } @@ -410,7 +410,7 @@ export function tests() { const initialEntries = await prefillFragment(txn, eventCreator, existingFragment, 10); const response = eventCreator.createMessagesResponse(); - response.chunk.push(initialEntries.at(-1)); /* Expect overlap */ + response.chunk.push(initialEntries[initialEntries.length-1]); /* Expect overlap */ await gapWriter.writeFragmentFill(newEntry, response, txn, null); const allEvents = await txn.timelineEvents._timelineStore.selectAll(); @@ -420,7 +420,7 @@ export function tests() { assert.deepEqual(initialEntry, storedEvent.event); } for (let j = 0; j < response.chunk.length - 1; j++) { - const responseEvent = response.chunk.at(-j - 2); + const responseEvent = response.chunk[response.chunk.length -j - 2]; const storedEvent = allEvents[i + j]; 
assert.deepEqual(responseEvent, storedEvent.event); } @@ -443,7 +443,7 @@ export function tests() { assert.deepEqual(initialEntry, storedEvent.event); } for (let j = 0; j < response.chunk.length - 1; j++) { - const responseEvent = response.chunk.at(-j - 1); + const responseEvent = response.chunk[response.chunk.length - j - 1]; const storedEvent = allEvents[i + j]; assert.deepEqual(responseEvent, storedEvent.event); } @@ -457,7 +457,7 @@ export function tests() { const initialEntries = await prefillFragment(txn, eventCreator, existingFragment, 10); const response = eventCreator.createMessagesResponse(); - response.chunk.push(initialEntries.at(-1)); /* Fake overlap */ + response.chunk.push(initialEntries[initialEntries.length-1]); /* Fake overlap */ await gapWriter.writeFragmentFill(newEntry, response, txn, null); const allEvents = await txn.timelineEvents._timelineStore.selectAll(); @@ -467,7 +467,7 @@ export function tests() { assert.deepEqual(initialEntry, storedEvent.event); } for (let j = 0; j < response.chunk.length - 1; j++) { - const responseEvent = response.chunk.at(-j - 2); + const responseEvent = response.chunk[response.chunk.length -j - 2]; const storedEvent = allEvents[i + j]; assert.deepEqual(responseEvent, storedEvent.event); } From bbd174cd67db238613182b8291f4168213757ba0 Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Wed, 15 Sep 2021 16:15:18 -0700 Subject: [PATCH 10/19] Add a class to mock timeline requests --- src/mocks/TimelineMock.ts | 255 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 255 insertions(+) create mode 100644 src/mocks/TimelineMock.ts diff --git a/src/mocks/TimelineMock.ts b/src/mocks/TimelineMock.ts new file mode 100644 index 00000000..4932e61e --- /dev/null +++ b/src/mocks/TimelineMock.ts @@ -0,0 +1,255 @@ +import {createEvent, withTextBody, withSender} from "./event.js"; +import {TimelineEvent} from "../matrix/storage/types"; + +export const TIMELINE_START_TOKEN = "timeline_start"; + +export class TimelineMock { + 
private _counter: number; + private _dagOrder: TimelineEvent[]; + private _syncOrder: TimelineEvent[]; + private _defaultSender: string; + + constructor(defaultSender: string) { + this._counter = 0; + this._dagOrder = []; + this._syncOrder = []; + this._defaultSender = defaultSender; + } + + _defaultEvent(id: string): TimelineEvent { + return withTextBody(`This is event ${id}`, withSender(this._defaultSender, createEvent("m.room.message", id))); + } + + _createEvent(func?: (eventId: string) => TimelineEvent): TimelineEvent { + const id = `$event${this._counter++}`; + return func ? func(id) : this._defaultEvent(id); + } + + _createEvents(n: number, func?: (eventId: string) => TimelineEvent) { + const events: TimelineEvent[] = []; + for (let i = 0; i < n; i++) { + events.push(this._createEvent(func)); + } + return events; + } + + insertAfter(token: string, n: number, func?: (eventId: string) => TimelineEvent) { + const events = this._createEvents(n, func); + const index = this._findIndex(token, "f", this._dagOrder); + this._dagOrder.splice(index, 0, ...events); + this._syncOrder.push(...events); + return events[events.length - 1]?.event_id; + } + + append(n: number, func?: (eventId: string) => TimelineEvent) { + const events = this._createEvents(n, func); + this._dagOrder.push(...events); + this._syncOrder.push(...events); + return events[events.length - 1]?.event_id; + } + + _getStep(direction: "f" | "b") : 1 | -1 { + return direction === "f" ? 1 : -1; + } + + _findIndex(token: string, direction: "f" | "b", eventOrdering: TimelineEvent[]): number { + const step = this._getStep(direction); + if (token === TIMELINE_START_TOKEN) { + const firstSyncEvent = this._syncOrder[0]; + if (!firstSyncEvent) { + // We have no events at all. Wherever you start looking, + // you'll stop looking right away. Zero works as well as anything else. 
+ return 0; + } + const orderIndex = eventOrdering.findIndex(e => e.event_id === firstSyncEvent.event_id); + return orderIndex; + } + // All other tokens are (non-inclusive) event indices + const index = eventOrdering.findIndex(e => e.event_id === token); + if (index === -1) { + // We didn't find this event token at all. What are we + // even looking at? + throw new Error("Invalid token passed to TimelineMock"); + } + return index + step; + } + + messages(begin: string, end: string | undefined, direction: "f" | "b", limit: number = 10) { + const step = this._getStep(direction); + let index = this._findIndex(begin, direction, this._dagOrder); + const chunk: TimelineEvent[] = []; + for (; limit > 0 && index >= 0 && index < this._dagOrder.length; index += step, limit--) { + if (this._dagOrder[index].event_id === end) { + break; + } + chunk.push(this._dagOrder[index]); + } + return { + start: begin, + end: chunk[chunk.length - 1]?.event_id || begin, + chunk, + state: [] + }; + } + + context(eventId: string, limit: number = 10) { + if (limit <= 0) { + throw new Error("Cannot fetch zero or less events!"); + } + let eventIndex = this._dagOrder.findIndex(e => e.event_id === eventId); + if (eventIndex === -1) { + throw new Error("Fetching context for unknown event"); + } + const event = this._dagOrder[eventIndex]; + limit -= 1; + let offset = 1; + const eventsBefore: TimelineEvent[] = []; + const eventsAfter: TimelineEvent[] = []; + while (limit !== 0 && (eventIndex - offset >= 0 || eventIndex + offset < this._dagOrder.length)) { + if (eventIndex - offset >= 0) { + eventsBefore.push(this._dagOrder[eventIndex - offset]); + limit--; + } + if (limit !== 0 && eventIndex + offset < this._dagOrder.length) { + eventsAfter.push(this._dagOrder[eventIndex + offset]); + limit--; + } + offset++; + } + return { + start: eventsBefore[eventsBefore.length - 1]?.event_id || eventId, + end: eventsAfter[eventsAfter.length - 1]?.event_id || eventId, + event, + events_before: eventsBefore, + 
events_after: eventsAfter, + state: [] + }; + } + + sync(since?: string, limit: number = 10) { + const startAt = since ? this._findIndex(since, "f", this._syncOrder) : 0; + const index = Math.max(this._syncOrder.length - limit, startAt); + const limited = this._syncOrder.length - startAt > limit; + const events: TimelineEvent[] = []; + for(let i = index; i < this._syncOrder.length; i++) { + events.push(this._syncOrder[i]); + } + return { + next_batch: events[events.length - 1]?.event_id || since || TIMELINE_START_TOKEN, + prev_batch: events[0]?.event_id || since || TIMELINE_START_TOKEN, + events, + limited + } + } +} + +export function tests() { + const SENDER = "@alice:hs.tdl"; + + function eventId(i: number): string { + return `$event${i}`; + } + + function eventIds(from: number, to: number): string[] { + return [...Array(to-from).keys()].map(i => eventId(i + from)); + } + + return { + "Append events are returned via sync": assert => { + const timeline = new TimelineMock(SENDER); + timeline.append(10); + const syncResponse = timeline.sync(); + const events = syncResponse.events.map(e => e.event_id); + assert.deepEqual(events, eventIds(0, 10)); + assert.equal(syncResponse.limited, false); + }, + "Limiting a sync properly limits the returned events": assert => { + const timeline = new TimelineMock(SENDER); + timeline.append(20); + const syncResponse = timeline.sync(undefined, 10); + const events = syncResponse.events.map(e => e.event_id); + assert.deepEqual(events, eventIds(10, 20)); + assert.equal(syncResponse.limited, true); + }, + "The context endpoint returns messages in DAG order around an event": assert => { + const timeline = new TimelineMock(SENDER); + timeline.append(30); + const context = timeline.context(eventId(15)); + assert.equal(context.event.event_id, eventId(15)); + assert.deepEqual(context.events_before.map(e => e.event_id).reverse(), eventIds(10, 15)); + assert.deepEqual(context.events_after.map(e => e.event_id), eventIds(16, 20)); + }, + "The 
context endpoint returns the proper number of messages": assert => { + const timeline = new TimelineMock(SENDER); + timeline.append(30); + for (const i of new Array(29).keys()) { + const middleFetch = timeline.context(eventId(15), i + 1); + assert.equal(middleFetch.events_before.length + middleFetch.events_after.length + 1, i + 1); + const startFetch = timeline.context(eventId(1), i + 1); + assert.equal(startFetch.events_before.length + startFetch.events_after.length + 1, i + 1); + const endFetch = timeline.context(eventId(28), i + 1); + assert.equal(endFetch.events_before.length + endFetch.events_after.length + 1, i + 1); + } + }, + "The previous batch from a sync returns the previous events": assert => { + const timeline = new TimelineMock(SENDER); + timeline.append(20); + const sync = timeline.sync(undefined, 10); + const messages = timeline.messages(sync.prev_batch, undefined, "b"); + const events = messages.chunk.map(e => e.event_id).reverse(); + assert.deepEqual(events, eventIds(0, 10)); + }, + "Two consecutive message fetches are continuous if no new events are inserted": assert => { + const timeline = new TimelineMock(SENDER); + timeline.append(30); + const sync = timeline.sync(undefined, 10); + const messages1 = timeline.messages(sync.prev_batch, undefined, "b"); + const events1 = messages1.chunk.map(e => e.event_id).reverse(); + const messages2 = timeline.messages(messages1.end, undefined, "b"); + const events2 = messages2.chunk.map(e => e.event_id).reverse(); + assert.deepEqual(events1, eventIds(10, 20)); + assert.deepEqual(events2, eventIds(0, 10)); + }, + "Two consecutive message fetches detect newly inserted event": assert => { + const timeline = new TimelineMock(SENDER); + timeline.append(30); + const messages1 = timeline.messages(eventId(20), undefined, "b", 10); + const events1 = messages1.chunk.map(e => e.event_id).reverse(); + timeline.insertAfter(eventId(9), 1); + const messages2 = timeline.messages(eventId(10), undefined, "b", 10); + const 
events2 = messages2.chunk.map(e => e.event_id).reverse(); + assert.deepEqual(events1, eventIds(10, 20)); + const expectedEvents2 = eventIds(1, 10); + expectedEvents2.push(eventId(30)); + assert.deepEqual(events2, expectedEvents2); + }, + "A sync that receives no events has the same next batch as it started with": assert => { + const timeline = new TimelineMock(SENDER); + timeline.append(10); + const sync1 = timeline.sync(); + const sync2 = timeline.sync(sync1.next_batch); + assert.equal(sync1.next_batch, sync2.next_batch); + }, + "An event inserted in the midle still shows up in a sync": assert => { + const timeline = new TimelineMock(SENDER); + timeline.append(30); + const sync1 = timeline.sync(undefined, 10); + const sync2 = timeline.sync(sync1.next_batch, 10) + assert.deepEqual(sync2.events, []); + assert.equal(sync2.limited, false); + timeline.insertAfter(TIMELINE_START_TOKEN, 1); + const sync3 = timeline.sync(sync2.next_batch, 10) + const events = sync3.events.map(e => e.event_id); + assert.deepEqual(events, eventIds(30, 31)); + }, + "An event inserted in the midle does not show up in a message fetch": assert => { + const timeline = new TimelineMock(SENDER); + timeline.append(30); + const sync1 = timeline.sync(undefined, 10); + const messages1 = timeline.messages(sync1.prev_batch, undefined, "f", 10); + timeline.insertAfter(TIMELINE_START_TOKEN, 1); + const messages2 = timeline.messages(sync1.prev_batch, undefined, "f", 10); + assert.deepEqual(messages1.chunk, messages2.chunk); + }, + } +} From bcfca9ad9ae281269f8c120bd0d2b2039c2bc160 Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Thu, 16 Sep 2021 21:53:56 -0700 Subject: [PATCH 11/19] Make event ID function public --- src/mocks/TimelineMock.ts | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/mocks/TimelineMock.ts b/src/mocks/TimelineMock.ts index 4932e61e..d4581142 100644 --- a/src/mocks/TimelineMock.ts +++ b/src/mocks/TimelineMock.ts @@ -3,6 +3,10 @@ import {TimelineEvent} 
from "../matrix/storage/types"; export const TIMELINE_START_TOKEN = "timeline_start"; +export function eventId(i: number): string { + return `$event${i}`; +} + export class TimelineMock { private _counter: number; private _dagOrder: TimelineEvent[]; @@ -21,7 +25,7 @@ export class TimelineMock { } _createEvent(func?: (eventId: string) => TimelineEvent): TimelineEvent { - const id = `$event${this._counter++}`; + const id = eventId(this._counter++); return func ? func(id) : this._defaultEvent(id); } @@ -146,10 +150,6 @@ export class TimelineMock { export function tests() { const SENDER = "@alice:hs.tdl"; - function eventId(i: number): string { - return `$event${i}`; - } - function eventIds(from: number, to: number): string[] { return [...Array(to-from).keys()].map(i => eventId(i + from)); } From 7d27b46873db0aca433a07f0ce8dd9ea910cdef1 Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Thu, 16 Sep 2021 23:53:38 -0700 Subject: [PATCH 12/19] Make the response of TimelineMock look like a room sync response --- src/mocks/TimelineMock.ts | 38 ++++++++++++++++++++------------------ 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/src/mocks/TimelineMock.ts b/src/mocks/TimelineMock.ts index d4581142..573bbf5b 100644 --- a/src/mocks/TimelineMock.ts +++ b/src/mocks/TimelineMock.ts @@ -7,6 +7,10 @@ export function eventId(i: number): string { return `$event${i}`; } +export function eventIds(from: number, to: number): string[] { + return [...Array(to-from).keys()].map(i => eventId(i + from)); +} + export class TimelineMock { private _counter: number; private _dagOrder: TimelineEvent[]; @@ -140,9 +144,11 @@ export class TimelineMock { } return { next_batch: events[events.length - 1]?.event_id || since || TIMELINE_START_TOKEN, - prev_batch: events[0]?.event_id || since || TIMELINE_START_TOKEN, - events, - limited + timeline: { + prev_batch: events[0]?.event_id || since || TIMELINE_START_TOKEN, + events, + limited + } } } } @@ -150,26 +156,22 @@ export class 
TimelineMock { export function tests() { const SENDER = "@alice:hs.tdl"; - function eventIds(from: number, to: number): string[] { - return [...Array(to-from).keys()].map(i => eventId(i + from)); - } - return { "Append events are returned via sync": assert => { const timeline = new TimelineMock(SENDER); timeline.append(10); const syncResponse = timeline.sync(); - const events = syncResponse.events.map(e => e.event_id); + const events = syncResponse.timeline.events.map(e => e.event_id); assert.deepEqual(events, eventIds(0, 10)); - assert.equal(syncResponse.limited, false); + assert.equal(syncResponse.timeline.limited, false); }, "Limiting a sync properly limits the returned events": assert => { const timeline = new TimelineMock(SENDER); timeline.append(20); const syncResponse = timeline.sync(undefined, 10); - const events = syncResponse.events.map(e => e.event_id); + const events = syncResponse.timeline.events.map(e => e.event_id); assert.deepEqual(events, eventIds(10, 20)); - assert.equal(syncResponse.limited, true); + assert.equal(syncResponse.timeline.limited, true); }, "The context endpoint returns messages in DAG order around an event": assert => { const timeline = new TimelineMock(SENDER); @@ -195,7 +197,7 @@ export function tests() { const timeline = new TimelineMock(SENDER); timeline.append(20); const sync = timeline.sync(undefined, 10); - const messages = timeline.messages(sync.prev_batch, undefined, "b"); + const messages = timeline.messages(sync.timeline.prev_batch, undefined, "b"); const events = messages.chunk.map(e => e.event_id).reverse(); assert.deepEqual(events, eventIds(0, 10)); }, @@ -203,7 +205,7 @@ export function tests() { const timeline = new TimelineMock(SENDER); timeline.append(30); const sync = timeline.sync(undefined, 10); - const messages1 = timeline.messages(sync.prev_batch, undefined, "b"); + const messages1 = timeline.messages(sync.timeline.prev_batch, undefined, "b"); const events1 = messages1.chunk.map(e => e.event_id).reverse(); 
const messages2 = timeline.messages(messages1.end, undefined, "b"); const events2 = messages2.chunk.map(e => e.event_id).reverse(); @@ -235,20 +237,20 @@ export function tests() { timeline.append(30); const sync1 = timeline.sync(undefined, 10); const sync2 = timeline.sync(sync1.next_batch, 10) - assert.deepEqual(sync2.events, []); - assert.equal(sync2.limited, false); + assert.deepEqual(sync2.timeline.events, []); + assert.equal(sync2.timeline.limited, false); timeline.insertAfter(TIMELINE_START_TOKEN, 1); const sync3 = timeline.sync(sync2.next_batch, 10) - const events = sync3.events.map(e => e.event_id); + const events = sync3.timeline.events.map(e => e.event_id); assert.deepEqual(events, eventIds(30, 31)); }, "An event inserted in the midle does not show up in a message fetch": assert => { const timeline = new TimelineMock(SENDER); timeline.append(30); const sync1 = timeline.sync(undefined, 10); - const messages1 = timeline.messages(sync1.prev_batch, undefined, "f", 10); + const messages1 = timeline.messages(sync1.timeline.prev_batch, undefined, "f", 10); timeline.insertAfter(TIMELINE_START_TOKEN, 1); - const messages2 = timeline.messages(sync1.prev_batch, undefined, "f", 10); + const messages2 = timeline.messages(sync1.timeline.prev_batch, undefined, "f", 10); assert.deepEqual(messages1.chunk, messages2.chunk); }, } From 82c35355b655f565e9e79257aff122e5c61dd2c4 Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Thu, 16 Sep 2021 23:54:13 -0700 Subject: [PATCH 13/19] Start translating GapWriter tests to using MockTimeline --- .../room/timeline/persistence/GapWriter.js | 295 ++++++------------ 1 file changed, 87 insertions(+), 208 deletions(-) diff --git a/src/matrix/room/timeline/persistence/GapWriter.js b/src/matrix/room/timeline/persistence/GapWriter.js index 16412a95..61e49d65 100644 --- a/src/matrix/room/timeline/persistence/GapWriter.js +++ b/src/matrix/room/timeline/persistence/GapWriter.js @@ -260,46 +260,20 @@ import {RelationWriter} from 
"./RelationWriter.js"; import {createMockStorage} from "../../../../mocks/Storage.js"; import {FragmentBoundaryEntry} from "../entries/FragmentBoundaryEntry.js"; import {createEvent, withTextBody, withContent, withSender} from "../../../../mocks/event.js"; -import {NullLogger} from "../../../../logging/NullLogger.js"; +import {NullLogItem} from "../../../../logging/NullLogger.js"; +import {TimelineMock, eventIds} from "../../../../mocks/TimelineMock.ts"; +import {SyncWriter} from "./SyncWriter.js"; +import {MemberWriter} from "./MemberWriter.js"; +import {KeyLimits} from "../../../storage/common"; export function tests() { - const alice = "alice@hs.tdl"; - const bob = "bob@hs.tdl"; const roomId = "!room:hs.tdl"; - const startToken = "begin_token"; - const endToken = "end_token"; - - class EventCreator { - constructor() { - this.counter = 0; - } - - nextEvent() { - const event = withTextBody(`This is event ${this.counter}`, withSender(bob, createEvent("m.room.message", `!event${this.counter}`))); - this.counter++; - return event; - } - - nextEvents(n) { - const events = []; - for (let i = 0; i < n; i++) { - events.push(this.nextEvent()); - } - return events; - } - - createMessagesResponse() { - return { - start: startToken, - end: endToken, - chunk: this.nextEvents(5), - state: [] - } - } - } + const alice = "alice@hs.tdl"; + const logger = new NullLogItem(); async function createGapFillTxn(storage) { return storage.readWriteTxn([ + storage.storeNames.roomMembers, storage.storeNames.pendingEvents, storage.storeNames.timelineEvents, storage.storeNames.timelineRelations, @@ -317,205 +291,110 @@ export function tests() { const gapWriter = new GapWriter({ roomId, storage, fragmentIdComparer, relationWriter }); - return { storage, txn, fragmentIdComparer, gapWriter, eventCreator: new EventCreator() }; - } - - async function createFragment(id, txn, fragmentIdComparer, overrides = {}) { - const newFragment = Object.assign({ - roomId, id, - previousId: null, - nextId: null, 
- nextToken: null, - previousToken: null - }, overrides); - await txn.timelineFragments.add(newFragment); - fragmentIdComparer.add(newFragment); - return newFragment; - } - - function prefillFragment(txn, eventCreator, fragment, n) { - let initialKey = EventKey.defaultFragmentKey(fragment.id); - const initialEntries = eventCreator.nextEvents(n); - initialEntries.forEach(e => { - txn.timelineEvents.insert(createEventEntry(initialKey, roomId, e)) - initialKey = initialKey.nextKey(); + const memberWriter = new MemberWriter(roomId); + const syncWriter = new SyncWriter({ + roomId, + fragmentIdComparer, + memberWriter, + relationWriter }); - return initialEntries; + return { storage, txn, fragmentIdComparer, gapWriter, syncWriter, timelineMock: new TimelineMock() }; } - async function assertTightLink(assert, txn, fragmentId1, fragmentId2) { - const fragment1 = await txn.timelineFragments.get(roomId, fragmentId1); - const fragment2 = await txn.timelineFragments.get(roomId, fragmentId2); + async function syncAndWrite(mocks, previousResponse) { + const {txn, timelineMock, syncWriter, fragmentIdComparer} = mocks; + const syncResponse = timelineMock.sync(previousResponse?.next_batch); + console.log(syncResponse.timeline.events); + const {newLiveKey} = await syncWriter.writeSync(syncResponse, false, false, txn, logger); + syncWriter.afterSync(newLiveKey); + return { + syncResponse, + fragmentEntry: newLiveKey ? 
FragmentBoundaryEntry.start( + await txn.timelineFragments.get(roomId, newLiveKey.fragmentId), + fragmentIdComparer, + ) : null, + }; + } + + async function backfillAndWrite(mocks, fragmentEntry) { + const {txn, timelineMock, gapWriter} = mocks; + const messageResponse = timelineMock.messages(fragmentEntry.token, undefined, fragmentEntry.direction.asApiString()); + await gapWriter.writeFragmentFill(fragmentEntry, messageResponse, txn, logger); + } + + async function allFragmentEvents(mocks, fragmentId) { + const {txn} = mocks; + const entries = await txn.timelineEvents.eventsAfter(roomId, new EventKey(fragmentId, KeyLimits.minStorageKey)); + return entries.map(e => e.event); + } + + async function fetchFragment(mocks, fragmentId) { + const {txn} = mocks; + return txn.timelineFragments.get(roomId, fragmentId); + } + + function assertDeepLink(assert, fragment1, fragment2) { assert.equal(fragment1.nextId, fragment2.id); assert.equal(fragment2.previousId, fragment1.id); - assert.equal(fragment2.previousToken, null); assert.equal(fragment1.nextToken, null); + assert.equal(fragment2.previousToken, null); } - async function assertWeakLink(assert, txn, fragmentId1, fragmentId2) { - const fragment1 = await txn.timelineFragments.get(roomId, fragmentId1); - const fragment2 = await txn.timelineFragments.get(roomId, fragmentId2); + function assertShallowLink(assert, fragment1, fragment2) { assert.equal(fragment1.nextId, fragment2.id); assert.equal(fragment2.previousId, fragment1.id); assert.notEqual(fragment2.previousToken, null); - assert.notEqual(fragment1.nextToken, null); } return { - "Backfilling an empty fragment": async assert => { - const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); - const emptyFragment = await createFragment(0, txn, fragmentIdComparer, { previousToken: startToken }); - const newEntry = FragmentBoundaryEntry.start(emptyFragment, fragmentIdComparer); - - const response = eventCreator.createMessagesResponse(); - await 
gapWriter.writeFragmentFill(newEntry, response, txn, null); - - const allEvents = await txn.timelineEvents.eventsAfter(roomId, EventKey.minKey, 100 /* fetch all */); - for (let i = 0; i < response.chunk.length; i++) { - const responseEvent = response.chunk[response.chunk.length -i - 1]; - const storedEvent = allEvents[i]; - assert.deepEqual(responseEvent, storedEvent.event); - } - await txn.complete(); - }, - "Backfilling a fragment with existing entries": async assert => { - const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); - const liveFragment = await createFragment(0, txn, fragmentIdComparer, { previousToken: startToken }); - const newEntry = FragmentBoundaryEntry.start(liveFragment, fragmentIdComparer); - - const initialEntries = await prefillFragment(txn, eventCreator, liveFragment, 10); - - const response = eventCreator.createMessagesResponse(); - await gapWriter.writeFragmentFill(newEntry, response, txn, null); - - const allEvents = await txn.timelineEvents.eventsAfter(roomId, EventKey.minKey, 100 /* fetch all */); - let i = 0; - for (; i < response.chunk.length; i++) { - const responseEvent = response.chunk[response.chunk.length -i - 1]; - const storedEvent = allEvents[i]; - assert.deepEqual(responseEvent, storedEvent.event); - } - for (const initialEntry of initialEntries) { - const storedEvent = allEvents[i++]; - assert.deepEqual(initialEntry, storedEvent.event); - } - - await txn.complete() + "Backfilling after one sync": async assert => { + const mocks = await setup(); + const { timelineMock } = mocks; + timelineMock.append(30); + const {fragmentEntry} = await syncAndWrite(mocks); + await backfillAndWrite(mocks, fragmentEntry); + const events = await allFragmentEvents(mocks, fragmentEntry.fragmentId); + assert.deepEqual(events.map(e => e.event_id), eventIds(10, 30)); }, "Backfilling a fragment that is expected to link up, and does": async assert => { - const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); 
- const existingFragment = await createFragment(0, txn, fragmentIdComparer, { nextId: 1, nextToken: startToken }); - const liveFragment = await createFragment(1, txn, fragmentIdComparer, { previousId: 0, previousToken: startToken }); - const newEntry = FragmentBoundaryEntry.start(liveFragment, fragmentIdComparer); + const mocks = await setup(); + const { timelineMock } = mocks; + timelineMock.append(10); + const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks); + timelineMock.append(15); + const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, syncResponse); + await backfillAndWrite(mocks, secondFragmentEntry); - const initialEntries = await prefillFragment(txn, eventCreator, existingFragment, 10); - const response = eventCreator.createMessagesResponse(); - response.chunk.push(initialEntries[initialEntries.length-1]); /* Expect overlap */ - await gapWriter.writeFragmentFill(newEntry, response, txn, null); - - const allEvents = await txn.timelineEvents._timelineStore.selectAll(); - let i = 0; - for (const initialEntry of initialEntries) { - const storedEvent = allEvents[i++]; - assert.deepEqual(initialEntry, storedEvent.event); - } - for (let j = 0; j < response.chunk.length - 1; j++) { - const responseEvent = response.chunk[response.chunk.length -j - 2]; - const storedEvent = allEvents[i + j]; - assert.deepEqual(responseEvent, storedEvent.event); - } - await assertTightLink(assert, txn, 0, 1); + const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId); + const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId); + assertDeepLink(assert, firstFragment, secondFragment) + const firstEvents = await allFragmentEvents(mocks, firstFragmentEntry.fragmentId); + assert.deepEqual(firstEvents.map(e => e.event_id), eventIds(0, 10)); + const secondEvents = await allFragmentEvents(mocks, secondFragmentEntry.fragmentId); + assert.deepEqual(secondEvents.map(e => e.event_id), eventIds(10, 
25)); }, "Backfilling a fragment that is expected to link up, but doesn't yet": async assert => { - const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); - const existingFragment = await createFragment(0, txn, fragmentIdComparer, { nextId: 1, nextToken: endToken }); - const liveFragment = await createFragment(1, txn, fragmentIdComparer, { previousId: 0, previousToken: startToken }); - const newEntry = FragmentBoundaryEntry.start(liveFragment, fragmentIdComparer); + const mocks = await setup(); + const { timelineMock } = mocks; + timelineMock.append(10); + const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks); + timelineMock.append(20); + const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, syncResponse); + await backfillAndWrite(mocks, secondFragmentEntry); - const initialEntries = await prefillFragment(txn, eventCreator, existingFragment, 10); - const response = eventCreator.createMessagesResponse(); - await gapWriter.writeFragmentFill(newEntry, response, txn, null); - - const allEvents = await txn.timelineEvents._timelineStore.selectAll(); - let i = 0; - for (const initialEntry of initialEntries) { - const storedEvent = allEvents[i++]; - assert.deepEqual(initialEntry, storedEvent.event); - } - for (let j = 0; j < response.chunk.length - 1; j++) { - const responseEvent = response.chunk[response.chunk.length - j - 1]; - const storedEvent = allEvents[i + j]; - assert.deepEqual(responseEvent, storedEvent.event); - } - await assertWeakLink(assert, txn, 0, 1); + const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId); + const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId); + assertShallowLink(assert, firstFragment, secondFragment) + const firstEvents = await allFragmentEvents(mocks, firstFragmentEntry.fragmentId); + assert.deepEqual(firstEvents.map(e => e.event_id), eventIds(0, 10)); + const secondEvents = await allFragmentEvents(mocks, 
secondFragmentEntry.fragmentId); + assert.deepEqual(secondEvents.map(e => e.event_id), eventIds(10, 30)); }, "Backfilling a fragment that is not expected to link up": async assert => { - const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); - const existingFragment = await createFragment(0, txn, fragmentIdComparer, { nextToken: startToken }); - const liveFragment = await createFragment(1, txn, fragmentIdComparer, { previousToken: startToken }); - const newEntry = FragmentBoundaryEntry.start(liveFragment, fragmentIdComparer); - - const initialEntries = await prefillFragment(txn, eventCreator, existingFragment, 10); - const response = eventCreator.createMessagesResponse(); - response.chunk.push(initialEntries[initialEntries.length-1]); /* Fake overlap */ - await gapWriter.writeFragmentFill(newEntry, response, txn, null); - - const allEvents = await txn.timelineEvents._timelineStore.selectAll(); - let i = 0; - for (const initialEntry of initialEntries) { - const storedEvent = allEvents[i++]; - assert.deepEqual(initialEntry, storedEvent.event); - } - for (let j = 0; j < response.chunk.length - 1; j++) { - const responseEvent = response.chunk[response.chunk.length -j - 2]; - const storedEvent = allEvents[i + j]; - assert.deepEqual(responseEvent, storedEvent.event); - } - await assertTightLink(assert, txn, 0, 1); }, "Receiving a sync with the same events as the current fragment does not create infinite link": async assert => { - const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); - const liveFragment = await createFragment(0, txn, fragmentIdComparer, { previousToken: startToken }); - const newEntry = FragmentBoundaryEntry.start(liveFragment, fragmentIdComparer); - - const initialEntries = await prefillFragment(txn, eventCreator, liveFragment, 10); - const response = { start: startToken, end: endToken, chunk: initialEntries.slice().reverse(), state: [] }; - await gapWriter.writeFragmentFill(newEntry, response, txn, new 
NullLogger()); - - const updatedLiveFragment = txn.timelineFragments.get(roomId, 0); - assert.equal(updatedLiveFragment.previousId, null); - const allEvents = await txn.timelineEvents._timelineStore.selectAll(); - let i = 0; - for (const initialEntry of initialEntries) { - assert.deepEqual(allEvents[i++].event, initialEntry); - } - assert.equal(allEvents.length, 10); }, "An event received by sync does not interrupt backfilling": async assert => { - const { txn, fragmentIdComparer, gapWriter, eventCreator } = await setup(); - const existingFragment = await createFragment(0, txn, fragmentIdComparer, { nextId: 1, nextToken: endToken }); - const liveFragment = await createFragment(1, txn, fragmentIdComparer, { previousId: 0, previousToken: startToken }); - const anotherFragment = await createFragment(2, txn, fragmentIdComparer); - const newEntry = FragmentBoundaryEntry.start(liveFragment, fragmentIdComparer); - - const initialEntries = await prefillFragment(txn, eventCreator, existingFragment, 10); - const [strayEntry] = await prefillFragment(txn, eventCreator, anotherFragment, 1); - const response = eventCreator.createMessagesResponse(); - const originalEntries = response.chunk.slice(); - response.chunk.splice(response.chunk.length - 3, 0, initialEntries[5], strayEntry); - await gapWriter.writeFragmentFill(newEntry, response, txn, null); - - const allEvents = await txn.timelineEvents._timelineStore.selectAll(); - let i = 0; - for (const initialEntry of initialEntries) { - const storedEvent = allEvents[i++]; - assert.deepEqual(initialEntry, storedEvent.event); - } - for (const originalEntry of originalEntries.reverse()) { - const storedEvent = allEvents[i++]; - assert.deepEqual(originalEntry, storedEvent.event); - } - await assertWeakLink(assert, txn, 0, 1); - }, + } } } From 820b048272103ead39b6ce002b3d67d3b12a5b21 Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Fri, 17 Sep 2021 10:57:51 -0700 Subject: [PATCH 14/19] Finish up the more difficult tests --- 
.../room/timeline/persistence/GapWriter.js | 34 ++++++++++++++++--- 1 file changed, 29 insertions(+), 5 deletions(-) diff --git a/src/matrix/room/timeline/persistence/GapWriter.js b/src/matrix/room/timeline/persistence/GapWriter.js index 61e49d65..837d445f 100644 --- a/src/matrix/room/timeline/persistence/GapWriter.js +++ b/src/matrix/room/timeline/persistence/GapWriter.js @@ -259,9 +259,8 @@ import {FragmentIdComparer} from "../FragmentIdComparer.js"; import {RelationWriter} from "./RelationWriter.js"; import {createMockStorage} from "../../../../mocks/Storage.js"; import {FragmentBoundaryEntry} from "../entries/FragmentBoundaryEntry.js"; -import {createEvent, withTextBody, withContent, withSender} from "../../../../mocks/event.js"; import {NullLogItem} from "../../../../logging/NullLogger.js"; -import {TimelineMock, eventIds} from "../../../../mocks/TimelineMock.ts"; +import {TimelineMock, eventIds, eventId} from "../../../../mocks/TimelineMock.ts"; import {SyncWriter} from "./SyncWriter.js"; import {MemberWriter} from "./MemberWriter.js"; import {KeyLimits} from "../../../storage/common"; @@ -304,7 +303,6 @@ export function tests() { async function syncAndWrite(mocks, previousResponse) { const {txn, timelineMock, syncWriter, fragmentIdComparer} = mocks; const syncResponse = timelineMock.sync(previousResponse?.next_batch); - console.log(syncResponse.timeline.events); const {newLiveKey} = await syncWriter.writeSync(syncResponse, false, false, txn, logger); syncWriter.afterSync(newLiveKey); return { @@ -390,11 +388,37 @@ export function tests() { const secondEvents = await allFragmentEvents(mocks, secondFragmentEntry.fragmentId); assert.deepEqual(secondEvents.map(e => e.event_id), eventIds(10, 30)); }, - "Backfilling a fragment that is not expected to link up": async assert => { - }, "Receiving a sync with the same events as the current fragment does not create infinite link": async assert => { + const mocks = await setup(); + const { txn, timelineMock } = mocks; + 
timelineMock.append(10); + const {syncResponse, fragmentEntry: fragmentEntry} = await syncAndWrite(mocks); + // Mess with the saved token to receive old events in backfill + fragmentEntry.token = syncResponse.next_batch; + txn.timelineFragments.update(fragmentEntry.fragment); + await backfillAndWrite(mocks, fragmentEntry); + + const fragment = await fetchFragment(mocks, fragmentEntry.fragmentId); + assert.notEqual(fragment.nextId, fragment.id); + assert.notEqual(fragment.previousId, fragment.id); }, "An event received by sync does not interrupt backfilling": async assert => { + const mocks = await setup(); + const { timelineMock } = mocks; + timelineMock.append(10); + const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks); + timelineMock.append(11); + const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, syncResponse); + timelineMock.insertAfter(eventId(9), 5); + await backfillAndWrite(mocks, secondFragmentEntry); + + const firstEvents = await allFragmentEvents(mocks, firstFragmentEntry.fragmentId); + assert.deepEqual(firstEvents.map(e => e.event_id), eventIds(0, 10)); + const secondEvents = await allFragmentEvents(mocks, secondFragmentEntry.fragmentId); + assert.deepEqual(secondEvents.map(e => e.event_id), [...eventIds(21,26), ...eventIds(10, 21)]); + const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId); + const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId); + assertDeepLink(assert, firstFragment, secondFragment) } } } From 50c8b995c34029d7c804fb44b8ee69252c746961 Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Mon, 20 Sep 2021 18:41:01 -0700 Subject: [PATCH 15/19] Undo GapWriter algorithm changes --- .../room/timeline/persistence/GapWriter.js | 30 +++++++++---------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/src/matrix/room/timeline/persistence/GapWriter.js b/src/matrix/room/timeline/persistence/GapWriter.js index 837d445f..58415373 
100644 --- a/src/matrix/room/timeline/persistence/GapWriter.js +++ b/src/matrix/room/timeline/persistence/GapWriter.js @@ -47,30 +47,28 @@ export class GapWriter { } nonOverlappingEvents.push(...remainingEvents.slice(0, duplicateEventIndex)); if (!expectedOverlappingEventId || duplicateEventId === expectedOverlappingEventId) { - // Only link fragment if this is the first overlapping fragment we discover. - // TODO is this sufficient? Might we get "out of order" fragments from events? - if (!neighbourFragmentEntry) { - // TODO: check here that the neighbourEvent is at the correct edge of it's fragment - // get neighbour fragment to link it up later on - const neighbourEvent = await txn.timelineEvents.getByEventId(this._roomId, duplicateEventId); + // TODO: check here that the neighbourEvent is at the correct edge of it's fragment + // get neighbour fragment to link it up later on + const neighbourEvent = await txn.timelineEvents.getByEventId(this._roomId, duplicateEventId); + if (neighbourEvent.fragmentId === fragmentEntry.fragmentId) { + log.log("hit #160, prevent fragment linking to itself", log.level.Warn); + } else { const neighbourFragment = await txn.timelineFragments.get(this._roomId, neighbourEvent.fragmentId); neighbourFragmentEntry = fragmentEntry.createNeighbourEntry(neighbourFragment); } - } - // If more events remain, or if this wasn't the expected overlapping event, - // we've hit https://github.com/matrix-org/synapse/issues/7164, - // e.g. the event id we found is already in our store but it is not - // the adjacent fragment id. Ignore the event, but keep processing the ones after. - remainingEvents = remainingEvents.slice(duplicateEventIndex + 1); + // trim overlapping events + remainingEvents = null; + } else { + // we've hit https://github.com/matrix-org/synapse/issues/7164, + // e.g. the event id we found is already in our store but it is not + // the adjacent fragment id. Ignore the event, but keep processing the ones after. 
+ remainingEvents = remainingEvents.slice(duplicateEventIndex + 1); + } } else { nonOverlappingEvents.push(...remainingEvents); remainingEvents = null; } } - if (neighbourFragmentEntry?.fragmentId === fragmentEntry.fragmentId) { - log.log("hit #160, prevent fragment linking to itself", log.level.Warn); - neighbourFragmentEntry = null; - } return {nonOverlappingEvents, neighbourFragmentEntry}; } From a3a743881d48d0c4b835624f3fa25020d530a5ce Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Mon, 20 Sep 2021 19:37:30 -0700 Subject: [PATCH 16/19] Make test adjustments requested in PR. --- .../room/timeline/persistence/GapWriter.js | 37 +++++++++++-------- src/mocks/TimelineMock.ts | 25 ++++++++----- 2 files changed, 36 insertions(+), 26 deletions(-) diff --git a/src/matrix/room/timeline/persistence/GapWriter.js b/src/matrix/room/timeline/persistence/GapWriter.js index 58415373..d2f93e21 100644 --- a/src/matrix/room/timeline/persistence/GapWriter.js +++ b/src/matrix/room/timeline/persistence/GapWriter.js @@ -298,9 +298,9 @@ export function tests() { return { storage, txn, fragmentIdComparer, gapWriter, syncWriter, timelineMock: new TimelineMock() }; } - async function syncAndWrite(mocks, previousResponse) { + async function syncAndWrite(mocks, { previous, limit } = {}) { const {txn, timelineMock, syncWriter, fragmentIdComparer} = mocks; - const syncResponse = timelineMock.sync(previousResponse?.next_batch); + const syncResponse = timelineMock.sync(previous?.next_batch, limit); const {newLiveKey} = await syncWriter.writeSync(syncResponse, false, false, txn, logger); syncWriter.afterSync(newLiveKey); return { @@ -329,14 +329,14 @@ export function tests() { return txn.timelineFragments.get(roomId, fragmentId); } - function assertDeepLink(assert, fragment1, fragment2) { + function assertFilledLink(assert, fragment1, fragment2) { assert.equal(fragment1.nextId, fragment2.id); assert.equal(fragment2.previousId, fragment1.id); assert.equal(fragment1.nextToken, null); 
assert.equal(fragment2.previousToken, null); } - function assertShallowLink(assert, fragment1, fragment2) { + function assertGapLink(assert, fragment1, fragment2) { assert.equal(fragment1.nextId, fragment2.id); assert.equal(fragment2.previousId, fragment1.id); assert.notEqual(fragment2.previousToken, null); @@ -351,46 +351,49 @@ export function tests() { await backfillAndWrite(mocks, fragmentEntry); const events = await allFragmentEvents(mocks, fragmentEntry.fragmentId); assert.deepEqual(events.map(e => e.event_id), eventIds(10, 30)); + await mocks.txn.complete(); }, - "Backfilling a fragment that is expected to link up, and does": async assert => { + "Backfilling a fragment that is expected to close a gap, and does": async assert => { const mocks = await setup(); const { timelineMock } = mocks; timelineMock.append(10); - const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks); + const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks, { limit: 10 }); timelineMock.append(15); - const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, syncResponse); + const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 }); await backfillAndWrite(mocks, secondFragmentEntry); const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId); const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId); - assertDeepLink(assert, firstFragment, secondFragment) + assertFilledLink(assert, firstFragment, secondFragment) const firstEvents = await allFragmentEvents(mocks, firstFragmentEntry.fragmentId); assert.deepEqual(firstEvents.map(e => e.event_id), eventIds(0, 10)); const secondEvents = await allFragmentEvents(mocks, secondFragmentEntry.fragmentId); assert.deepEqual(secondEvents.map(e => e.event_id), eventIds(10, 25)); + await mocks.txn.complete(); }, - "Backfilling a fragment that is expected to link up, but doesn't yet": async 
assert => { + "Backfilling a fragment that is expected to close a gap, but doesn't yet": async assert => { const mocks = await setup(); const { timelineMock } = mocks; timelineMock.append(10); - const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks); + const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks, { limit: 10 }); timelineMock.append(20); - const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, syncResponse); + const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 }); await backfillAndWrite(mocks, secondFragmentEntry); const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId); const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId); - assertShallowLink(assert, firstFragment, secondFragment) + assertGapLink(assert, firstFragment, secondFragment) const firstEvents = await allFragmentEvents(mocks, firstFragmentEntry.fragmentId); assert.deepEqual(firstEvents.map(e => e.event_id), eventIds(0, 10)); const secondEvents = await allFragmentEvents(mocks, secondFragmentEntry.fragmentId); assert.deepEqual(secondEvents.map(e => e.event_id), eventIds(10, 30)); + await mocks.txn.complete(); }, "Receiving a sync with the same events as the current fragment does not create infinite link": async assert => { const mocks = await setup(); const { txn, timelineMock } = mocks; timelineMock.append(10); - const {syncResponse, fragmentEntry: fragmentEntry} = await syncAndWrite(mocks); + const {syncResponse, fragmentEntry: fragmentEntry} = await syncAndWrite(mocks, { limit: 10 }); // Mess with the saved token to receive old events in backfill fragmentEntry.token = syncResponse.next_batch; txn.timelineFragments.update(fragmentEntry.fragment); @@ -399,14 +402,15 @@ export function tests() { const fragment = await fetchFragment(mocks, fragmentEntry.fragmentId); assert.notEqual(fragment.nextId, fragment.id); 
assert.notEqual(fragment.previousId, fragment.id); + await mocks.txn.complete(); }, "An event received by sync does not interrupt backfilling": async assert => { const mocks = await setup(); const { timelineMock } = mocks; timelineMock.append(10); - const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks); + const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks, { limit: 10 }); timelineMock.append(11); - const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, syncResponse); + const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 }); timelineMock.insertAfter(eventId(9), 5); await backfillAndWrite(mocks, secondFragmentEntry); @@ -416,7 +420,8 @@ export function tests() { assert.deepEqual(secondEvents.map(e => e.event_id), [...eventIds(21,26), ...eventIds(10, 21)]); const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId); const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId); - assertDeepLink(assert, firstFragment, secondFragment) + assertFilledLink(assert, firstFragment, secondFragment) + await mocks.txn.complete(); } } } diff --git a/src/mocks/TimelineMock.ts b/src/mocks/TimelineMock.ts index 573bbf5b..444a55cf 100644 --- a/src/mocks/TimelineMock.ts +++ b/src/mocks/TimelineMock.ts @@ -109,7 +109,6 @@ export class TimelineMock { throw new Error("Fetching context for unknown event"); } const event = this._dagOrder[eventIndex]; - limit -= 1; let offset = 1; const eventsBefore: TimelineEvent[] = []; const eventsAfter: TimelineEvent[] = []; @@ -179,18 +178,18 @@ export function tests() { const context = timeline.context(eventId(15)); assert.equal(context.event.event_id, eventId(15)); assert.deepEqual(context.events_before.map(e => e.event_id).reverse(), eventIds(10, 15)); - assert.deepEqual(context.events_after.map(e => e.event_id), eventIds(16, 20)); + assert.deepEqual(context.events_after.map(e 
=> e.event_id), eventIds(16, 21)); }, "The context endpoint returns the proper number of messages": assert => { const timeline = new TimelineMock(SENDER); timeline.append(30); for (const i of new Array(29).keys()) { const middleFetch = timeline.context(eventId(15), i + 1); - assert.equal(middleFetch.events_before.length + middleFetch.events_after.length + 1, i + 1); + assert.equal(middleFetch.events_before.length + middleFetch.events_after.length, i + 1); const startFetch = timeline.context(eventId(1), i + 1); - assert.equal(startFetch.events_before.length + startFetch.events_after.length + 1, i + 1); + assert.equal(startFetch.events_before.length + startFetch.events_after.length, i + 1); const endFetch = timeline.context(eventId(28), i + 1); - assert.equal(endFetch.events_before.length + endFetch.events_after.length + 1, i + 1); + assert.equal(endFetch.events_before.length + endFetch.events_after.length, i + 1); } }, "The previous batch from a sync returns the previous events": assert => { @@ -204,23 +203,27 @@ export function tests() { "Two consecutive message fetches are continuous if no new events are inserted": assert => { const timeline = new TimelineMock(SENDER); timeline.append(30); + const sync = timeline.sync(undefined, 10); const messages1 = timeline.messages(sync.timeline.prev_batch, undefined, "b"); const events1 = messages1.chunk.map(e => e.event_id).reverse(); + assert.deepEqual(events1, eventIds(10, 20)); + const messages2 = timeline.messages(messages1.end, undefined, "b"); const events2 = messages2.chunk.map(e => e.event_id).reverse(); - assert.deepEqual(events1, eventIds(10, 20)); assert.deepEqual(events2, eventIds(0, 10)); }, "Two consecutive message fetches detect newly inserted event": assert => { const timeline = new TimelineMock(SENDER); timeline.append(30); + const messages1 = timeline.messages(eventId(20), undefined, "b", 10); const events1 = messages1.chunk.map(e => e.event_id).reverse(); + assert.deepEqual(events1, eventIds(10, 20)); 
timeline.insertAfter(eventId(9), 1); + const messages2 = timeline.messages(eventId(10), undefined, "b", 10); const events2 = messages2.chunk.map(e => e.event_id).reverse(); - assert.deepEqual(events1, eventIds(10, 20)); const expectedEvents2 = eventIds(1, 10); expectedEvents2.push(eventId(30)); assert.deepEqual(events2, expectedEvents2); @@ -232,23 +235,25 @@ export function tests() { const sync2 = timeline.sync(sync1.next_batch); assert.equal(sync1.next_batch, sync2.next_batch); }, - "An event inserted in the midle still shows up in a sync": assert => { + "An event inserted at the start still shows up in a sync": assert => { const timeline = new TimelineMock(SENDER); timeline.append(30); const sync1 = timeline.sync(undefined, 10); const sync2 = timeline.sync(sync1.next_batch, 10) assert.deepEqual(sync2.timeline.events, []); assert.equal(sync2.timeline.limited, false); + timeline.insertAfter(TIMELINE_START_TOKEN, 1); const sync3 = timeline.sync(sync2.next_batch, 10) const events = sync3.timeline.events.map(e => e.event_id); - assert.deepEqual(events, eventIds(30, 31)); + assert.deepEqual(events, [eventId(30)]); }, - "An event inserted in the midle does not show up in a message fetch": assert => { + "An event inserted at the start does not show up in a non-overlapping message fetch": assert => { const timeline = new TimelineMock(SENDER); timeline.append(30); const sync1 = timeline.sync(undefined, 10); const messages1 = timeline.messages(sync1.timeline.prev_batch, undefined, "f", 10); + timeline.insertAfter(TIMELINE_START_TOKEN, 1); const messages2 = timeline.messages(sync1.timeline.prev_batch, undefined, "f", 10); assert.deepEqual(messages1.chunk, messages2.chunk); From 92dcc6c9804d251e612c4153413396a00dbe5c66 Mon Sep 17 00:00:00 2001 From: Danila Fedorin Date: Tue, 21 Sep 2021 09:39:09 -0700 Subject: [PATCH 17/19] Remove duplicated lines --- src/matrix/storage/idb/Transaction.ts | 3 --- 1 file changed, 3 deletions(-) diff --git 
a/src/matrix/storage/idb/Transaction.ts b/src/matrix/storage/idb/Transaction.ts index fd2c75a3..a4b68048 100644 --- a/src/matrix/storage/idb/Transaction.ts +++ b/src/matrix/storage/idb/Transaction.ts @@ -56,9 +56,6 @@ export class Transaction { private _writeErrors: WriteErrorInfo[]; constructor(txn: IDBTransaction, allowedStoreNames: StoreNames[], storage: Storage) { - this._txn = txn; - this._allowedStoreNames = allowedStoreNames; - this._stores = {}; this._txn = txn; this._allowedStoreNames = allowedStoreNames; this._stores = {}; From dd71fdbe080bce9cd3b40ee7db3d18ebfaa18020 Mon Sep 17 00:00:00 2001 From: Bruno Windels Date: Thu, 23 Sep 2021 10:04:58 +0200 Subject: [PATCH 18/19] add comment --- src/mocks/TimelineMock.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/src/mocks/TimelineMock.ts b/src/mocks/TimelineMock.ts index 444a55cf..d38e37cc 100644 --- a/src/mocks/TimelineMock.ts +++ b/src/mocks/TimelineMock.ts @@ -7,6 +7,7 @@ export function eventId(i: number): string { return `$event${i}`; } +/** `from` is included, `to` is excluded */ export function eventIds(from: number, to: number): string[] { return [...Array(to-from).keys()].map(i => eventId(i + from)); } From 4b7cb6da9e088096c3145aca454286b0b9e8fe2f Mon Sep 17 00:00:00 2001 From: Bruno Windels Date: Thu, 23 Sep 2021 10:10:22 +0200 Subject: [PATCH 19/19] make backfill limit explicit --- src/matrix/room/timeline/persistence/GapWriter.js | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/matrix/room/timeline/persistence/GapWriter.js b/src/matrix/room/timeline/persistence/GapWriter.js index d2f93e21..5757370e 100644 --- a/src/matrix/room/timeline/persistence/GapWriter.js +++ b/src/matrix/room/timeline/persistence/GapWriter.js @@ -312,9 +312,9 @@ export function tests() { }; } - async function backfillAndWrite(mocks, fragmentEntry) { + async function backfillAndWrite(mocks, fragmentEntry, limit) { const {txn, timelineMock, gapWriter} = mocks; - const messageResponse = 
timelineMock.messages(fragmentEntry.token, undefined, fragmentEntry.direction.asApiString()); + const messageResponse = timelineMock.messages(fragmentEntry.token, undefined, fragmentEntry.direction.asApiString(), limit); await gapWriter.writeFragmentFill(fragmentEntry, messageResponse, txn, logger); } @@ -348,7 +348,7 @@ export function tests() { const { timelineMock } = mocks; timelineMock.append(30); const {fragmentEntry} = await syncAndWrite(mocks); - await backfillAndWrite(mocks, fragmentEntry); + await backfillAndWrite(mocks, fragmentEntry, 10); const events = await allFragmentEvents(mocks, fragmentEntry.fragmentId); assert.deepEqual(events.map(e => e.event_id), eventIds(10, 30)); await mocks.txn.complete(); @@ -360,7 +360,7 @@ export function tests() { const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks, { limit: 10 }); timelineMock.append(15); const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 }); - await backfillAndWrite(mocks, secondFragmentEntry); + await backfillAndWrite(mocks, secondFragmentEntry, 10); const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId); const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId); @@ -378,7 +378,7 @@ export function tests() { const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks, { limit: 10 }); timelineMock.append(20); const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 }); - await backfillAndWrite(mocks, secondFragmentEntry); + await backfillAndWrite(mocks, secondFragmentEntry, 10); const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId); const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId); @@ -397,7 +397,7 @@ export function tests() { // Mess with the saved token to receive old events in backfill fragmentEntry.token = syncResponse.next_batch; 
txn.timelineFragments.update(fragmentEntry.fragment); - await backfillAndWrite(mocks, fragmentEntry); + await backfillAndWrite(mocks, fragmentEntry, 10); const fragment = await fetchFragment(mocks, fragmentEntry.fragmentId); assert.notEqual(fragment.nextId, fragment.id); @@ -412,7 +412,7 @@ export function tests() { timelineMock.append(11); const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 }); timelineMock.insertAfter(eventId(9), 5); - await backfillAndWrite(mocks, secondFragmentEntry); + await backfillAndWrite(mocks, secondFragmentEntry, 10); const firstEvents = await allFragmentEvents(mocks, firstFragmentEntry.fragmentId); assert.deepEqual(firstEvents.map(e => e.event_id), eventIds(0, 10));