Make test adjustments requested in PR.
parent 50c8b995c3
commit a3a743881d
2 changed files with 36 additions and 26 deletions
@@ -298,9 +298,9 @@ export function tests() {
         return { storage, txn, fragmentIdComparer, gapWriter, syncWriter, timelineMock: new TimelineMock() };
     }

-    async function syncAndWrite(mocks, previousResponse) {
+    async function syncAndWrite(mocks, { previous, limit } = {}) {
         const {txn, timelineMock, syncWriter, fragmentIdComparer} = mocks;
-        const syncResponse = timelineMock.sync(previousResponse?.next_batch);
+        const syncResponse = timelineMock.sync(previous?.next_batch, limit);
         const {newLiveKey} = await syncWriter.writeSync(syncResponse, false, false, txn, logger);
         syncWriter.afterSync(newLiveKey);
         return {
@@ -329,14 +329,14 @@ export function tests() {
         return txn.timelineFragments.get(roomId, fragmentId);
     }

-    function assertDeepLink(assert, fragment1, fragment2) {
+    function assertFilledLink(assert, fragment1, fragment2) {
         assert.equal(fragment1.nextId, fragment2.id);
         assert.equal(fragment2.previousId, fragment1.id);
         assert.equal(fragment1.nextToken, null);
         assert.equal(fragment2.previousToken, null);
     }

-    function assertShallowLink(assert, fragment1, fragment2) {
+    function assertGapLink(assert, fragment1, fragment2) {
         assert.equal(fragment1.nextId, fragment2.id);
         assert.equal(fragment2.previousId, fragment1.id);
         assert.notEqual(fragment2.previousToken, null);
@@ -351,46 +351,49 @@ export function tests() {
             await backfillAndWrite(mocks, fragmentEntry);
             const events = await allFragmentEvents(mocks, fragmentEntry.fragmentId);
             assert.deepEqual(events.map(e => e.event_id), eventIds(10, 30));
+            await mocks.txn.complete();
         },
-        "Backfilling a fragment that is expected to link up, and does": async assert => {
+        "Backfilling a fragment that is expected to close a gap, and does": async assert => {
             const mocks = await setup();
             const { timelineMock } = mocks;
             timelineMock.append(10);
-            const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks);
+            const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks, { limit: 10 });
             timelineMock.append(15);
-            const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, syncResponse);
+            const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 });
             await backfillAndWrite(mocks, secondFragmentEntry);

             const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId);
             const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId);
-            assertDeepLink(assert, firstFragment, secondFragment)
+            assertFilledLink(assert, firstFragment, secondFragment)
             const firstEvents = await allFragmentEvents(mocks, firstFragmentEntry.fragmentId);
             assert.deepEqual(firstEvents.map(e => e.event_id), eventIds(0, 10));
             const secondEvents = await allFragmentEvents(mocks, secondFragmentEntry.fragmentId);
             assert.deepEqual(secondEvents.map(e => e.event_id), eventIds(10, 25));
+            await mocks.txn.complete();
         },
-        "Backfilling a fragment that is expected to link up, but doesn't yet": async assert => {
+        "Backfilling a fragment that is expected to close a gap, but doesn't yet": async assert => {
             const mocks = await setup();
             const { timelineMock } = mocks;
             timelineMock.append(10);
-            const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks);
+            const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks, { limit: 10 });
             timelineMock.append(20);
-            const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, syncResponse);
+            const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 });
             await backfillAndWrite(mocks, secondFragmentEntry);

             const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId);
             const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId);
-            assertShallowLink(assert, firstFragment, secondFragment)
+            assertGapLink(assert, firstFragment, secondFragment)
             const firstEvents = await allFragmentEvents(mocks, firstFragmentEntry.fragmentId);
             assert.deepEqual(firstEvents.map(e => e.event_id), eventIds(0, 10));
             const secondEvents = await allFragmentEvents(mocks, secondFragmentEntry.fragmentId);
             assert.deepEqual(secondEvents.map(e => e.event_id), eventIds(10, 30));
+            await mocks.txn.complete();
         },
         "Receiving a sync with the same events as the current fragment does not create infinite link": async assert => {
             const mocks = await setup();
             const { txn, timelineMock } = mocks;
             timelineMock.append(10);
-            const {syncResponse, fragmentEntry: fragmentEntry} = await syncAndWrite(mocks);
+            const {syncResponse, fragmentEntry: fragmentEntry} = await syncAndWrite(mocks, { limit: 10 });
             // Mess with the saved token to receive old events in backfill
             fragmentEntry.token = syncResponse.next_batch;
             txn.timelineFragments.update(fragmentEntry.fragment);
@@ -399,14 +402,15 @@ export function tests() {
             const fragment = await fetchFragment(mocks, fragmentEntry.fragmentId);
             assert.notEqual(fragment.nextId, fragment.id);
             assert.notEqual(fragment.previousId, fragment.id);
+            await mocks.txn.complete();
         },
         "An event received by sync does not interrupt backfilling": async assert => {
             const mocks = await setup();
             const { timelineMock } = mocks;
             timelineMock.append(10);
-            const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks);
+            const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks, { limit: 10 });
             timelineMock.append(11);
-            const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, syncResponse);
+            const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 });
             timelineMock.insertAfter(eventId(9), 5);
             await backfillAndWrite(mocks, secondFragmentEntry);

@@ -416,7 +420,8 @@ export function tests() {
             assert.deepEqual(secondEvents.map(e => e.event_id), [...eventIds(21,26), ...eventIds(10, 21)]);
             const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId);
             const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId);
-            assertDeepLink(assert, firstFragment, secondFragment)
+            assertFilledLink(assert, firstFragment, secondFragment)
+            await mocks.txn.complete();
         }
     }
 }
@@ -109,7 +109,6 @@ export class TimelineMock {
             throw new Error("Fetching context for unknown event");
         }
         const event = this._dagOrder[eventIndex];
-        limit -= 1;
         let offset = 1;
         const eventsBefore: TimelineEvent[] = [];
         const eventsAfter: TimelineEvent[] = [];
@@ -179,18 +178,18 @@ export function tests() {
             const context = timeline.context(eventId(15));
             assert.equal(context.event.event_id, eventId(15));
             assert.deepEqual(context.events_before.map(e => e.event_id).reverse(), eventIds(10, 15));
-            assert.deepEqual(context.events_after.map(e => e.event_id), eventIds(16, 20));
+            assert.deepEqual(context.events_after.map(e => e.event_id), eventIds(16, 21));
         },
         "The context endpoint returns the proper number of messages": assert => {
             const timeline = new TimelineMock(SENDER);
             timeline.append(30);
             for (const i of new Array(29).keys()) {
                 const middleFetch = timeline.context(eventId(15), i + 1);
-                assert.equal(middleFetch.events_before.length + middleFetch.events_after.length + 1, i + 1);
+                assert.equal(middleFetch.events_before.length + middleFetch.events_after.length, i + 1);
                 const startFetch = timeline.context(eventId(1), i + 1);
-                assert.equal(startFetch.events_before.length + startFetch.events_after.length + 1, i + 1);
+                assert.equal(startFetch.events_before.length + startFetch.events_after.length, i + 1);
                 const endFetch = timeline.context(eventId(28), i + 1);
-                assert.equal(endFetch.events_before.length + endFetch.events_after.length + 1, i + 1);
+                assert.equal(endFetch.events_before.length + endFetch.events_after.length, i + 1);
             }
         },
         "The previous batch from a sync returns the previous events": assert => {
@@ -204,23 +203,27 @@ export function tests() {
         "Two consecutive message fetches are continuous if no new events are inserted": assert => {
             const timeline = new TimelineMock(SENDER);
             timeline.append(30);
+
             const sync = timeline.sync(undefined, 10);
             const messages1 = timeline.messages(sync.timeline.prev_batch, undefined, "b");
             const events1 = messages1.chunk.map(e => e.event_id).reverse();
+            assert.deepEqual(events1, eventIds(10, 20));
+
             const messages2 = timeline.messages(messages1.end, undefined, "b");
             const events2 = messages2.chunk.map(e => e.event_id).reverse();
-            assert.deepEqual(events1, eventIds(10, 20));
             assert.deepEqual(events2, eventIds(0, 10));
         },
         "Two consecutive message fetches detect newly inserted event": assert => {
             const timeline = new TimelineMock(SENDER);
             timeline.append(30);
+
             const messages1 = timeline.messages(eventId(20), undefined, "b", 10);
             const events1 = messages1.chunk.map(e => e.event_id).reverse();
+            assert.deepEqual(events1, eventIds(10, 20));
             timeline.insertAfter(eventId(9), 1);
+
             const messages2 = timeline.messages(eventId(10), undefined, "b", 10);
             const events2 = messages2.chunk.map(e => e.event_id).reverse();
-            assert.deepEqual(events1, eventIds(10, 20));
             const expectedEvents2 = eventIds(1, 10);
             expectedEvents2.push(eventId(30));
             assert.deepEqual(events2, expectedEvents2);
@@ -232,23 +235,25 @@
             const sync2 = timeline.sync(sync1.next_batch);
             assert.equal(sync1.next_batch, sync2.next_batch);
         },
-        "An event inserted in the midle still shows up in a sync": assert => {
+        "An event inserted at the start still shows up in a sync": assert => {
             const timeline = new TimelineMock(SENDER);
             timeline.append(30);
             const sync1 = timeline.sync(undefined, 10);
             const sync2 = timeline.sync(sync1.next_batch, 10)
             assert.deepEqual(sync2.timeline.events, []);
             assert.equal(sync2.timeline.limited, false);
+
             timeline.insertAfter(TIMELINE_START_TOKEN, 1);
             const sync3 = timeline.sync(sync2.next_batch, 10)
             const events = sync3.timeline.events.map(e => e.event_id);
-            assert.deepEqual(events, eventIds(30, 31));
+            assert.deepEqual(events, [eventId(30)]);
         },
-        "An event inserted in the midle does not show up in a message fetch": assert => {
+        "An event inserted at the start does not show up in a non-overlapping message fetch": assert => {
             const timeline = new TimelineMock(SENDER);
             timeline.append(30);
             const sync1 = timeline.sync(undefined, 10);
             const messages1 = timeline.messages(sync1.timeline.prev_batch, undefined, "f", 10);
+
             timeline.insertAfter(TIMELINE_START_TOKEN, 1);
             const messages2 = timeline.messages(sync1.timeline.prev_batch, undefined, "f", 10);
             assert.deepEqual(messages1.chunk, messages2.chunk);