diff --git a/package.json b/package.json
index aaf7e30e..1112c06d 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "hydrogen-web",
-  "version": "0.2.11",
+  "version": "0.2.12",
   "description": "A javascript matrix client prototype, trying to minize RAM usage by offloading as much as possible to IndexedDB",
   "main": "index.js",
   "directories": {
diff --git a/prototypes/idb-continue-on-constrainterror.html b/prototypes/idb-continue-on-constrainterror.html
new file mode 100644
index 00000000..71e56c27
--- /dev/null
+++ b/prototypes/idb-continue-on-constrainterror.html
@@ -0,0 +1,100 @@
diff --git a/src/domain/session/room/timeline/ReactionsViewModel.js b/src/domain/session/room/timeline/ReactionsViewModel.js
index 3fd9f15f..f0dcf79f 100644
--- a/src/domain/session/room/timeline/ReactionsViewModel.js
+++ b/src/domain/session/room/timeline/ReactionsViewModel.js
@@ -247,8 +247,8 @@ export function tests() {
                 storage.storeNames.timelineFragments
             ]);
             txn.timelineFragments.add({id: 1, roomId});
-            txn.timelineEvents.insert({fragmentId: 1, eventIndex: 2, event: messageEvent, roomId});
-            txn.timelineEvents.insert({fragmentId: 1, eventIndex: 3, event: myReactionEvent, roomId});
+            txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 2, event: messageEvent, roomId}, new NullLogItem());
+            txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 3, event: myReactionEvent, roomId}, new NullLogItem());
             await relationWriter.writeRelation(myReactionEntry, txn, new NullLogItem());
             await txn.complete();
             // 2. setup queue & timeline
@@ -309,7 +309,7 @@ export function tests() {
                 storage.storeNames.timelineFragments
             ]);
             txn.timelineFragments.add({id: 1, roomId});
-            txn.timelineEvents.insert({fragmentId: 1, eventIndex: 2, event: messageEvent, roomId});
+            txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 2, event: messageEvent, roomId}, new NullLogItem());
             await txn.complete();
             // 2. setup queue & timeline
             const queue = new SendQueue({roomId, storage, hsApi: new MockHomeServer().api});
diff --git a/src/logging/IDBLogger.js b/src/logging/IDBLogger.js
index 96578cd0..03c2bf88 100644
--- a/src/logging/IDBLogger.js
+++ b/src/logging/IDBLogger.js
@@ -21,7 +21,7 @@ import {
     reqAsPromise,
     iterateCursor,
     fetchResults,
-} from "../matrix/storage/idb/utils.js";
+} from "../matrix/storage/idb/utils";
 import {BaseLogger} from "./BaseLogger.js";
 
 export class IDBLogger extends BaseLogger {
diff --git a/src/logging/NullLogger.js b/src/logging/NullLogger.js
index c0f3a143..060212bd 100644
--- a/src/logging/NullLogger.js
+++ b/src/logging/NullLogger.js
@@ -20,7 +20,7 @@ function noop () {}
 
 export class NullLogger {
     constructor() {
-        this.item = new NullLogItem();
+        this.item = new NullLogItem(this);
     }
 
     log() {}
@@ -51,6 +51,10 @@ export class NullLogger {
 }
 
 export class NullLogItem {
+    constructor(logger) {
+        this.logger = logger;
+    }
+
     wrap(_, callback) {
         return callback(this);
     }
diff --git a/src/matrix/room/timeline/Timeline.js b/src/matrix/room/timeline/Timeline.js
index 54eabf96..9169b029 100644
--- a/src/matrix/room/timeline/Timeline.js
+++ b/src/matrix/room/timeline/Timeline.js
@@ -447,10 +447,10 @@ export function tests() {
             // 1. put event and reaction into storage
             const storage = await createMockStorage();
             const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents, storage.storeNames.timelineRelations]);
-            txn.timelineEvents.insert({
+            txn.timelineEvents.tryInsert({
                 event: withContent(createAnnotation(messageId, "👋"), createEvent("m.reaction", reactionId, bob)),
                 fragmentId: 1, eventIndex: 1, roomId
-            });
+            }, new NullLogItem());
             txn.timelineRelations.add(roomId, messageId, ANNOTATION_RELATION_TYPE, reactionId);
             await txn.complete();
             // 2. setup the timeline
@@ -543,10 +543,10 @@ export function tests() {
             // 1. put reaction in storage
             const storage = await createMockStorage();
             const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents, storage.storeNames.timelineRelations]);
-            txn.timelineEvents.insert({
+            txn.timelineEvents.tryInsert({
                 event: withContent(createAnnotation(messageId, "👋"), createEvent("m.reaction", reactionId, bob)),
                 fragmentId: 1, eventIndex: 3, roomId
-            });
+            }, new NullLogItem());
             await txn.complete();
             // 2. setup timeline
             const pendingEvents = new ObservableArray();
diff --git a/src/matrix/room/timeline/persistence/GapWriter.js b/src/matrix/room/timeline/persistence/GapWriter.js
index 55b15ef5..b040e3a1 100644
--- a/src/matrix/room/timeline/persistence/GapWriter.js
+++ b/src/matrix/room/timeline/persistence/GapWriter.js
@@ -105,9 +105,10 @@ export class GapWriter {
             if (updatedRelationTargetEntries) {
                 updatedEntries.push(...updatedRelationTargetEntries);
             }
-            txn.timelineEvents.insert(eventStorageEntry);
-            const eventEntry = new EventEntry(eventStorageEntry, this._fragmentIdComparer);
-            directionalAppend(entries, eventEntry, direction);
+            if (await txn.timelineEvents.tryInsert(eventStorageEntry, log)) {
+                const eventEntry = new EventEntry(eventStorageEntry, this._fragmentIdComparer);
+                directionalAppend(entries, eventEntry, direction);
+            }
         }
         return {entries, updatedEntries};
     }
@@ -293,9 +294,9 @@ export function tests() {
         };
     }
 
-    async function backfillAndWrite(mocks, fragmentEntry) {
+    async function backfillAndWrite(mocks, fragmentEntry, limit) {
         const {txn, timelineMock, gapWriter} = mocks;
-        const messageResponse = timelineMock.messages(fragmentEntry.token, undefined, fragmentEntry.direction.asApiString());
+        const messageResponse = timelineMock.messages(fragmentEntry.token, undefined, fragmentEntry.direction.asApiString(), limit);
         await gapWriter.writeFragmentFill(fragmentEntry, messageResponse, txn, logger);
     }
 
@@ -333,7 +334,7 @@ export function tests() {
            const { timelineMock } = mocks;
            timelineMock.append(30);
            const {fragmentEntry} = await syncAndWrite(mocks);
-           await backfillAndWrite(mocks, fragmentEntry);
+           await backfillAndWrite(mocks, fragmentEntry, 10);
            const events = await allFragmentEvents(mocks, fragmentEntry.fragmentId);
            assert.deepEqual(events.map(e => e.event_id), eventIds(10, 30));
            await mocks.txn.complete();
@@ -346,8 +347,8 @@ export function tests() {
            timelineMock.append(15);
            const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 });
            // Only the second backfill (in which all events overlap) fills the gap.
-           await backfillAndWrite(mocks, secondFragmentEntry);
-           await backfillAndWrite(mocks, await updatedFragmentEntry(mocks, secondFragmentEntry));
+           await backfillAndWrite(mocks, secondFragmentEntry, 10);
+           await backfillAndWrite(mocks, await updatedFragmentEntry(mocks, secondFragmentEntry), 10);
 
            const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId);
            const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId);
@@ -365,7 +366,7 @@ export function tests() {
            const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks, { limit: 10 });
            timelineMock.append(20);
            const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 });
-           await backfillAndWrite(mocks, secondFragmentEntry);
+           await backfillAndWrite(mocks, secondFragmentEntry, 10);
 
            const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId);
            const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId);
@@ -384,7 +385,7 @@ export function tests() {
            // Mess with the saved token to receive old events in backfill
            fragmentEntry.token = syncResponse.next_batch;
            txn.timelineFragments.update(fragmentEntry.fragment);
-           await backfillAndWrite(mocks, fragmentEntry);
+           await backfillAndWrite(mocks, fragmentEntry, 10);
 
            const fragment = await fetchFragment(mocks, fragmentEntry.fragmentId);
            assert.notEqual(fragment.nextId, fragment.id);
@@ -400,8 +401,8 @@ export function tests() {
            const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 });
            timelineMock.insertAfter(eventId(9), 5);
            // Only the second backfill (in which all events overlap) fills the gap.
-           await backfillAndWrite(mocks, secondFragmentEntry);
-           await backfillAndWrite(mocks, await updatedFragmentEntry(mocks, secondFragmentEntry));
+           await backfillAndWrite(mocks, secondFragmentEntry, 10);
+           await backfillAndWrite(mocks, await updatedFragmentEntry(mocks, secondFragmentEntry), 10);
 
            const firstEvents = await allFragmentEvents(mocks, firstFragmentEntry.fragmentId);
            assert.deepEqual(firstEvents.map(e => e.event_id), eventIds(0, 10));
diff --git a/src/matrix/room/timeline/persistence/RelationWriter.js b/src/matrix/room/timeline/persistence/RelationWriter.js
index 4116b775..0466b3da 100644
--- a/src/matrix/room/timeline/persistence/RelationWriter.js
+++ b/src/matrix/room/timeline/persistence/RelationWriter.js
@@ -275,7 +275,7 @@ export function tests() {
            const storage = await createMockStorage();
            const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents, storage.storeNames.timelineRelations]);
-           txn.timelineEvents.insert({fragmentId: 1, eventIndex: 2, event, roomId});
+           txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 2, event, roomId}, new NullLogItem());
            const updatedEntries = await relationWriter.writeRelation(redactionEntry, txn, new NullLogItem());
            await txn.complete();
 
@@ -300,7 +300,7 @@ export function tests() {
            const storage = await createMockStorage();
            const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents, storage.storeNames.timelineRelations]);
-           txn.timelineEvents.insert({fragmentId: 1, eventIndex: 2, event, roomId});
+           txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 2, event, roomId}, new NullLogItem());
            const updatedEntries = await relationWriter.writeRelation(reactionEntry, txn, new NullLogItem());
            await txn.complete();
 
@@ -329,7 +329,7 @@ export function tests() {
            const storage = await createMockStorage();
            const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents, storage.storeNames.timelineRelations]);
-           txn.timelineEvents.insert({fragmentId: 1, eventIndex: 2, event, roomId});
+           txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 2, event, roomId}, new NullLogItem());
            await relationWriter.writeRelation(reaction1Entry, txn, new NullLogItem());
            const updatedEntries = await relationWriter.writeRelation(reaction2Entry, txn, new NullLogItem());
            await txn.complete();
 
@@ -358,10 +358,10 @@ export function tests() {
            const storage = await createMockStorage();
            const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents, storage.storeNames.timelineRelations]);
-           txn.timelineEvents.insert({fragmentId: 1, eventIndex: 2, event, roomId});
-           txn.timelineEvents.insert({fragmentId: 1, eventIndex: 3, event: myReaction, roomId});
+           txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 2, event, roomId}, new NullLogItem());
+           txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 3, event: myReaction, roomId}, new NullLogItem());
            await relationWriter.writeRelation(myReactionEntry, txn, new NullLogItem());
-           txn.timelineEvents.insert({fragmentId: 1, eventIndex: 4, event: bobReaction, roomId});
+           txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 4, event: bobReaction, roomId}, new NullLogItem());
            await relationWriter.writeRelation(bobReactionEntry, txn, new NullLogItem());
            const updatedEntries = await relationWriter.writeRelation(myReactionRedactionEntry, txn, new NullLogItem());
            await txn.complete();
diff --git a/src/matrix/room/timeline/persistence/SyncWriter.js b/src/matrix/room/timeline/persistence/SyncWriter.js
index 07326225..af6f55bc 100644
--- a/src/matrix/room/timeline/persistence/SyncWriter.js
+++ b/src/matrix/room/timeline/persistence/SyncWriter.js
@@ -162,7 +162,10 @@ export class SyncWriter {
                 storageEntry.displayName = member.displayName;
                 storageEntry.avatarUrl = member.avatarUrl;
             }
-            txn.timelineEvents.insert(storageEntry, log);
+            const couldInsert = await txn.timelineEvents.tryInsert(storageEntry, log);
+            if (!couldInsert) {
+                continue;
+            }
             const entry = new EventEntry(storageEntry, this._fragmentIdComparer);
             entries.push(entry);
             const updatedRelationTargetEntries = await this._relationWriter.writeRelation(entry, txn, log);
@@ -252,3 +255,35 @@ export class SyncWriter {
         return this._lastLiveKey;
     }
 }
+
+import {createMockStorage} from "../../../../mocks/Storage.js";
+import {createEvent, withTextBody} from "../../../../mocks/event.js";
+import {Instance as nullLogger} from "../../../../logging/NullLogger.js";
+export function tests() {
+    const roomId = "!abc:hs.tld";
+    return {
+        "calling timelineEvents.tryInsert with the same event id a second time fails": async assert => {
+            const storage = await createMockStorage();
+            const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents]);
+            const event = withTextBody("hello!", createEvent("m.room.message", "$abc", "@alice:hs.tld"));
+            const entry1 = createEventEntry(EventKey.defaultLiveKey, roomId, event);
+            assert.equal(await txn.timelineEvents.tryInsert(entry1, nullLogger.item), true);
+            const entry2 = createEventEntry(EventKey.defaultLiveKey.nextKey(), roomId, event);
+            assert.equal(await txn.timelineEvents.tryInsert(entry2, nullLogger.item), false);
+            // fake-indexeddb still aborts the transaction when preventDefault is called by tryInsert, so don't await as it will abort
+            // await txn.complete();
+        },
+        "calling timelineEvents.tryInsert with the same event key a second time fails": async assert => {
+            const storage = await createMockStorage();
+            const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents]);
+            const event1 = withTextBody("hello!", createEvent("m.room.message", "$abc", "@alice:hs.tld"));
+            const entry1 = createEventEntry(EventKey.defaultLiveKey, roomId, event1);
+            assert.equal(await txn.timelineEvents.tryInsert(entry1, nullLogger.item), true);
+            const event2 = withTextBody("hello!", createEvent("m.room.message", "$def", "@alice:hs.tld"));
+            const entry2 = createEventEntry(EventKey.defaultLiveKey, roomId, event2);
+            assert.equal(await txn.timelineEvents.tryInsert(entry2, nullLogger.item), false);
+            // fake-indexeddb still aborts the transaction when preventDefault is called by tryInsert, so don't await as it will abort
+            // await txn.complete();
+        },
+    }
+}
diff --git a/src/matrix/storage/idb/Store.ts b/src/matrix/storage/idb/Store.ts
index 662dad26..9c350b98 100644
--- a/src/matrix/storage/idb/Store.ts
+++ b/src/matrix/storage/idb/Store.ts
@@ -15,9 +15,9 @@ limitations under the License.
 */
 
 import {QueryTarget, IDBQuery} from "./QueryTarget";
-import {IDBRequestAttemptError} from "./error";
+import {IDBRequestError, IDBRequestAttemptError} from "./error";
 import {reqAsPromise} from "./utils";
-import {Transaction} from "./Transaction";
+import {Transaction, IDBKey} from "./Transaction";
 import {LogItem} from "../../../logging/LogItem.js";
 
 const LOG_REQUESTS = false;
@@ -126,6 +126,10 @@ class QueryTargetWrapper {
             throw new IDBRequestAttemptError("index", this._qt, err, [name]);
         }
     }
+
+    get indexNames(): string[] {
+        return Array.from(this._qtStore.indexNames);
+    }
 }
 
 export class Store extends QueryTarget {
@@ -162,30 +166,62 @@
         this._prepareErrorLog(request, log, "add", undefined, value);
     }
 
+    async tryAdd(value: T, log: LogItem): Promise<boolean> {
+        try {
+            await reqAsPromise(this._idbStore.add(value));
+            return true;
+        } catch (err) {
+            if (err instanceof IDBRequestError) {
+                log.log({l: "could not write", id: this._getKeys(value), e: err}, log.level.Warn);
+                err.preventTransactionAbort();
+                return false;
+            } else {
+                throw err;
+            }
+        }
+    }
+
     delete(keyOrKeyRange: IDBValidKey | IDBKeyRange, log?: LogItem): void {
         // ok to not monitor result of request, see comment in `put`.
         const request = this._idbStore.delete(keyOrKeyRange);
         this._prepareErrorLog(request, log, "delete", keyOrKeyRange, undefined);
     }
 
-    private _prepareErrorLog(request: IDBRequest, log: LogItem | undefined, operationName: string, key: IDBValidKey | IDBKeyRange | undefined, value: T | undefined) {
+    private _prepareErrorLog(request: IDBRequest, log: LogItem | undefined, operationName: string, key: IDBKey | undefined, value: T | undefined) {
         if (log) {
             log.ensureRefId();
         }
         reqAsPromise(request).catch(err => {
-            try {
-                if (!key && value) {
-                    key = this._getKey(value);
-                }
-            } catch {
-                key = "getKey failed";
+            let keys : IDBKey[] | undefined = undefined;
+            if (value) {
+                keys = this._getKeys(value);
+            } else if (key) {
+                keys = [key];
             }
-            this._transaction.addWriteError(err, log, operationName, key);
+            this._transaction.addWriteError(err, log, operationName, keys);
         });
     }
 
-    private _getKey(value: T): IDBValidKey {
+    private _getKeys(value: T): IDBValidKey[] {
+        const keys: IDBValidKey[] = [];
         const {keyPath} = this._idbStore;
+        try {
+            keys.push(this._readKeyPath(value, keyPath));
+        } catch (err) {
+            console.warn("could not read keyPath", keyPath);
+        }
+        for (const indexName of this._idbStore.indexNames) {
+            try {
+                const index = this._idbStore.index(indexName);
+                keys.push(this._readKeyPath(value, index.keyPath));
+            } catch (err) {
+                console.warn("could not read index", indexName);
+            }
+        }
+        return keys;
+    }
+
+    private _readKeyPath(value: T, keyPath: string[] | string): IDBValidKey {
         if (Array.isArray(keyPath)) {
             let field: any = value;
             for (const part of keyPath) {
@@ -198,6 +234,6 @@
             return field as IDBValidKey;
         } else {
             return value[keyPath] as IDBValidKey;
-        }
+        }
     }
 }
diff --git a/src/matrix/storage/idb/Transaction.ts b/src/matrix/storage/idb/Transaction.ts
index a4b68048..9de4caf2 100644
--- a/src/matrix/storage/idb/Transaction.ts
+++ b/src/matrix/storage/idb/Transaction.ts
@@ -39,12 +39,14 @@ import {AccountDataStore} from "./stores/AccountDataStore";
 import {LogItem} from "../../../logging/LogItem.js";
 import {BaseLogger} from "../../../logging/BaseLogger.js";
 
+export type IDBKey = IDBValidKey | IDBKeyRange;
+
 class WriteErrorInfo {
     constructor(
         public readonly error: StorageError,
         public readonly refItem: LogItem | undefined,
         public readonly operationName: string,
-        public readonly key: IDBValidKey | IDBKeyRange | undefined,
+        public readonly keys: IDBKey[] | undefined,
     ) {}
 }
 
@@ -196,10 +198,10 @@ export class Transaction {
         }
     }
 
-    addWriteError(error: StorageError, refItem: LogItem | undefined, operationName: string, key: IDBValidKey | IDBKeyRange | undefined) {
+    addWriteError(error: StorageError, refItem: LogItem | undefined, operationName: string, keys: IDBKey[] | undefined) {
         // don't log subsequent `AbortError`s
         if (error.errcode !== "AbortError" || this._writeErrors.length === 0) {
-            this._writeErrors.push(new WriteErrorInfo(error, refItem, operationName, key));
+            this._writeErrors.push(new WriteErrorInfo(error, refItem, operationName, keys));
         }
     }
 
@@ -210,7 +212,7 @@ export class Transaction {
             errorGroupItem.set("allowedStoreNames", this._allowedStoreNames);
         }
         for (const info of this._writeErrors) {
-            errorGroupItem.wrap({l: info.operationName, id: info.key}, item => {
+            errorGroupItem.wrap({l: info.operationName, id: info.keys}, item => {
                 if (info.refItem) {
                     item.refDetached(info.refItem);
                 }
diff --git a/src/matrix/storage/idb/error.ts b/src/matrix/storage/idb/error.ts
index 388ad4c0..fb602168 100644
--- a/src/matrix/storage/idb/error.ts
+++ b/src/matrix/storage/idb/error.ts
@@ -57,10 +57,18 @@ export class IDBError extends StorageError {
 }
 
 export class IDBRequestError extends IDBError {
-    constructor(request: IDBRequest, message: string = "IDBRequest failed") {
+    private errorEvent: Event;
+
+    constructor(errorEvent: Event) {
+        const request = errorEvent.target as IDBRequest;
         const source = request.source;
         const cause = request.error;
-        super(message, source, cause);
+        super("IDBRequest failed", source, cause);
+        this.errorEvent = errorEvent;
+    }
+
+    preventTransactionAbort() {
+        this.errorEvent.preventDefault();
     }
 }
diff --git a/src/matrix/storage/idb/stores/TimelineEventStore.ts b/src/matrix/storage/idb/stores/TimelineEventStore.ts
index 47328820..34165d9e 100644
--- a/src/matrix/storage/idb/stores/TimelineEventStore.ts
+++ b/src/matrix/storage/idb/stores/TimelineEventStore.ts
@@ -253,15 +253,17 @@ export class TimelineEventStore {
         return occuringEventIds;
     }
 
-    /** Inserts a new entry into the store. The combination of roomId and eventKey should not exist yet, or an error is thrown.
-     * @param entry the entry to insert
-     * @return nothing. To wait for the operation to finish, await the transaction it's part of.
-     * @throws {StorageError} ...
+    /** Inserts a new entry into the store.
+     *
+     * If the event already exists in the store (either the eventKey or the event id
+     * are already known for the given roomId), this operation has no effect.
+     *
+     * Returns if the event was not yet known and the entry was written.
      */
-    insert(entry: TimelineEventEntry, log: LogItem): void {
+    tryInsert(entry: TimelineEventEntry, log: LogItem): Promise<boolean> {
         (entry as TimelineEventStorageEntry).key = encodeKey(entry.roomId, entry.fragmentId, entry.eventIndex);
         (entry as TimelineEventStorageEntry).eventIdKey = encodeEventIdKey(entry.roomId, entry.event.event_id);
-        this._timelineStore.add(entry as TimelineEventStorageEntry, log);
+        return this._timelineStore.tryAdd(entry as TimelineEventStorageEntry, log);
     }
 
     /** Updates the entry into the store with the given [roomId, eventKey] combination.
diff --git a/src/matrix/storage/idb/utils.ts b/src/matrix/storage/idb/utils.ts
index f9209f56..a9432139 100644
--- a/src/matrix/storage/idb/utils.ts
+++ b/src/matrix/storage/idb/utils.ts
@@ -97,7 +97,7 @@ export function reqAsPromise(req: IDBRequest): Promise {
             needsSyncPromise && Promise._flush && Promise._flush();
         });
         req.addEventListener("error", event => {
-            const error = new IDBRequestError(event.target as IDBRequest);
+            const error = new IDBRequestError(event);
             reject(error);
             // @ts-ignore
             needsSyncPromise && Promise._flush && Promise._flush();
         });
@@ -143,8 +143,8 @@ type CursorIterator = (value: I extends IDBCursorWithVal
 export function iterateCursor(cursorRequest: IDBRequest, processValue: CursorIterator): Promise {
     // TODO: does cursor already have a value here??
     return new Promise((resolve, reject) => {
-        cursorRequest.onerror = () => {
-            reject(new IDBRequestError(cursorRequest));
+        cursorRequest.onerror = event => {
+            reject(new IDBRequestError(event));
             // @ts-ignore
             needsSyncPromise && Promise._flush && Promise._flush();
         };
diff --git a/src/mocks/Storage.js b/src/mocks/Storage.js
index 359ffa2c..876cc009 100644
--- a/src/mocks/Storage.js
+++ b/src/mocks/Storage.js
@@ -16,8 +16,8 @@ limitations under the License.
 import {FDBFactory, FDBKeyRange} from "../../lib/fake-indexeddb/index.js";
 import {StorageFactory} from "../matrix/storage/idb/StorageFactory";
-import {NullLogItem} from "../logging/NullLogger.js";
+import {Instance as nullLogger} from "../logging/NullLogger.js";
 
 export function createMockStorage() {
-    return new StorageFactory(null, new FDBFactory(), FDBKeyRange).create(1, new NullLogItem());
+    return new StorageFactory(null, new FDBFactory(), FDBKeyRange).create(1, nullLogger.item);
 }
diff --git a/src/mocks/TimelineMock.ts b/src/mocks/TimelineMock.ts
index 444a55cf..d38e37cc 100644
--- a/src/mocks/TimelineMock.ts
+++ b/src/mocks/TimelineMock.ts
@@ -7,6 +7,7 @@ export function eventId(i: number): string {
     return `$event${i}`;
 }
 
+/** `from` is included, `to` is excluded */
 export function eventIds(from: number, to: number): string[] {
     return [...Array(to-from).keys()].map(i => eventId(i + from));
 }
diff --git a/src/platform/web/ui/session/room/timeline/BaseMessageView.js b/src/platform/web/ui/session/room/timeline/BaseMessageView.js
index e1268cb3..424587a8 100644
--- a/src/platform/web/ui/session/room/timeline/BaseMessageView.js
+++ b/src/platform/web/ui/session/room/timeline/BaseMessageView.js
@@ -66,7 +66,7 @@ export class BaseMessageView extends TemplateView {
         let reactionsView = null;
         t.mapSideEffect(vm => vm.reactions, reactions => {
             if (reactions && this._interactive && !reactionsView) {
-                reactionsView = new ReactionsView(vm.reactions);
+                reactionsView = new ReactionsView(reactions);
                 this.addSubView(reactionsView);
                 li.appendChild(mountView(reactionsView));
             } else if (!reactions && reactionsView) {