This commit is contained in:
Bruno Windels 2020-06-26 23:26:24 +02:00
parent 625598be90
commit f5d3092031
19 changed files with 358 additions and 82 deletions

View file

@ -1,3 +1,11 @@
# TODO
## Member list
- support migrations in StorageFactory
- migrate all stores from key to key_path
- how to deal with members coming from backfill? do we even need to store them?
# How to store members?
All of this is assuming we'll use lazy loading of members.

View file

@ -0,0 +1,7 @@
## Get member for timeline event
So when writing sync, we persist the member's display name and avatar.
The server might or might not support lazy loading of members.
if it is a room we just joined

View file

@ -51,6 +51,7 @@ export class SessionLoadViewModel extends ViewModel {
this._error = err;
} finally {
this._loading = false;
// loadLabel in case of sc.loadError also gets updated through this
this.emitChange("loading");
}
}

View file

@ -159,6 +159,7 @@ export class SessionPickerViewModel extends ViewModel {
}
async import(json) {
try {
const data = JSON.parse(json);
const {sessionInfo} = data;
sessionInfo.comment = `Imported on ${new Date().toLocaleString()} from id ${sessionInfo.id}.`;
@ -166,6 +167,10 @@ export class SessionPickerViewModel extends ViewModel {
await this._storageFactory.import(sessionInfo.id, data.stores);
await this._sessionInfoStorage.add(sessionInfo);
this._sessions.set(new SessionItemViewModel(sessionInfo, this));
} catch (err) {
alert(err.message);
console.error(err);
}
}
async delete(id) {

View file

@ -0,0 +1,19 @@
const R0_5_0 = "r0.5.0";

/**
 * Thin wrapper around the server's /versions response that exposes
 * feature checks based on the spec versions the server advertises.
 */
export class ServerFeatures {
    /**
     * @param {?Object} versionResponse parsed /versions response, may be falsy
     */
    constructor(versionResponse) {
        this._versionResponse = versionResponse;
    }

    _supportsVersion(version) {
        const versions = this._versionResponse && this._versionResponse.versions;
        return Array.isArray(versions) ? versions.includes(version) : false;
    }

    /** Lazy loading of room members was introduced in spec r0.5.0. */
    get lazyLoadMembers() {
        return this._supportsVersion(R0_5_0);
    }
}

View file

@ -94,7 +94,7 @@ export class SessionContainer {
this._status.set(LoadStatus.LoginFailed);
} else if (err instanceof ConnectionError) {
this._loginFailure = LoginFailure.Connection;
this._status.set(LoadStatus.LoginFailure);
this._status.set(LoadStatus.LoginFailed);
} else {
this._status.set(LoadStatus.Error);
}
@ -175,9 +175,14 @@ export class SessionContainer {
}
}
// only transition into Ready once the first sync has succeeded
this._waitForFirstSyncHandle = this._sync.status.waitFor(s => s === SyncStatus.Syncing);
this._waitForFirstSyncHandle = this._sync.status.waitFor(s => s === SyncStatus.Syncing || s === SyncStatus.Stopped);
try {
await this._waitForFirstSyncHandle.promise;
if (this._sync.status.get() === SyncStatus.Stopped) {
if (this._sync.error) {
throw this._sync.error;
}
}
} catch (err) {
// if dispose is called from stop, bail out
if (err instanceof AbortError) {

View file

@ -92,6 +92,7 @@ export class Sync {
storeNames.session,
storeNames.roomSummary,
storeNames.roomState,
storeNames.roomMembers,
storeNames.timelineEvents,
storeNames.timelineFragments,
storeNames.pendingEvents,
@ -116,6 +117,7 @@ export class Sync {
}
} catch(err) {
console.warn("aborting syncTxn because of error");
console.error(err);
// avoid corrupting state by only
// storing the sync up till the point
// the exception occurred

View file

@ -34,7 +34,7 @@ export class Room extends EventEmitter {
afterSync({summaryChanges, newTimelineEntries, newLiveKey, removedPendingEvents}) {
this._syncWriter.afterSync(newLiveKey);
if (summaryChanges) {
this._summary.afterSync(summaryChanges);
this._summary.applyChanges(summaryChanges);
this.emit("change");
this._emitCollectionChange(this);
}
@ -59,6 +59,40 @@ export class Room extends EventEmitter {
return this._sendQueue.enqueueEvent(eventType, content);
}
// Fetches the full member list from the server (once per room, tracked via
// summary.hasFetchedMembers), merges it into the roomMembers store, and
// returns a MemberList.
// NOTE(review): this method has several unresolved names — see inline notes.
async loadMemberList() {
    let members;
    if (!this._summary.hasFetchedMembers) {
        // we need to get the syncToken here!
        // FIXME(review): `syncToken` is not defined in this scope — this line
        // throws a ReferenceError as written; the token needs to be threaded in.
        const memberResponse = await this._hsApi.members(this._roomId, syncToken).response;
        const txn = await this._storage.readWriteTxn([
            this._storage.storeNames.roomSummary,
            this._storage.storeNames.roomMembers,
        ]);
        // mark members as fetched in the summary before writing the members,
        // all within the same transaction
        const summaryChanges = this._summary.writeHasFetchedMembers(true, txn);
        const {roomMembers} = txn;
        const memberEvents = memberResponse.chunk;
        if (!Array.isArray(memberEvents)) {
            throw new Error("malformed");
        }
        members = await Promise.all(memberEvents.map(async memberEvent => {
            const userId = memberEvent && memberEvent.state_key;
            if (!userId) {
                throw new Error("malformed");
            }
            // merge with any member data already written by sync
            const memberData = await roomMembers.get(this._roomId, userId);
            // FIXME(review): `event` is undefined here — presumably this should
            // be `memberEvent`; also `updateOrCreateMember` is not in scope
            // (it is a static async method on RoomMember, so the call would
            // additionally need `RoomMember.` and `await`).
            const member = updateOrCreateMember(this._roomId, memberData, event);
            if (member) {
                roomMembers.set(member.serialize());
            }
            return member;
        }));
        await txn.complete();
        this._summary.applyChanges(summaryChanges);
    }
    // NOTE(review): when members were already fetched previously, `members`
    // stays undefined here — stored members are presumably loaded lazily by
    // MemberList; confirm that is the intended contract.
    return new MemberList(this._roomId, members, this._storage);
}
/** @public */
async fillGap(fragmentEntry, amount) {

View file

@ -0,0 +1,69 @@
export const EVENT_TYPE = "m.room.member";

/**
 * An m.room.member state event distilled into the plain data record
 * persisted in the roomMembers store (keyed on [roomId, userId]).
 */
export class RoomMember {
    constructor(data) {
        this._data = data;
    }

    /**
     * Merges a member event into previously stored member data, or creates a
     * new member from the event when no data was stored yet.
     * @param {string} roomId
     * @param {?Object} memberData previously serialized member data, if any
     * @param {?Object} memberEvent m.room.member state event
     * @returns {?RoomMember} undefined when the event is absent or malformed
     */
    static async updateOrCreateMember(roomId, memberData, memberEvent) {
        if (!memberEvent) {
            return;
        }
        const userId = memberEvent.state_key;
        const {content} = memberEvent;
        if (!userId || !content) {
            return;
        }
        let member;
        if (memberData) {
            member = new RoomMember(memberData);
            member.updateWithMemberEvent(memberEvent);
        } else {
            // fix: was `this._roomId`, but `this` is the class object inside a
            // static method, so the stored roomId would always be undefined;
            // use the roomId parameter instead
            member = RoomMember.fromMemberEvent(roomId, memberEvent);
        }
        return member;
    }

    /**
     * Creates a fresh member record from a member event.
     * @returns {?RoomMember} undefined when the event has no state_key
     */
    static fromMemberEvent(roomId, memberEvent) {
        const userId = memberEvent && memberEvent.state_key;
        if (!userId) {
            return;
        }
        const member = new RoomMember({
            roomId: roomId,
            userId: userId,
            avatarUrl: null,
            displayName: null,
            membership: null,
            deviceTrackingStatus: 0,
        });
        member.updateWithMemberEvent(memberEvent);
        return member;
    }

    get roomId() {
        return this._data.roomId;
    }

    get userId() {
        return this._data.userId;
    }

    /** Copies membership, avatar and display name from a member event. */
    updateWithMemberEvent(event) {
        if (!event || !event.content) {
            return;
        }
        const {content} = event;
        this._data.membership = content.membership;
        this._data.avatarUrl = content.avatar_url;
        this._data.displayName = content.displayname;
    }

    /** @returns {Object} the plain data record to store in roomMembers */
    serialize() {
        // fix: was `return this.data`, which is undefined — the field is `_data`
        return this._data;
    }
}

View file

@ -82,6 +82,7 @@ class SummaryData {
this.heroes = copy ? copy.heroes : null;
this.canonicalAlias = copy ? copy.canonicalAlias : null;
this.altAliases = copy ? copy.altAliases : null;
this.hasFetchedMembers = copy ? copy.hasFetchedMembers : false;
this.cloned = copy ? true : false;
}
@ -132,6 +133,17 @@ export class RoomSummary {
return this._data.joinCount;
}
get hasFetchedMembers() {
return this._data.hasFetchedMembers;
}
writeHasFetchedMembers(value, txn) {
const data = new SummaryData(this._data);
data.hasFetchedMembers = value;
txn.roomSummary.set(data.serialize());
return data;
}
writeSync(roomResponse, membership, txn) {
// clear cloned flag, so cloneIfNeeded makes a copy and
// this._data is not modified if any field is changed.
@ -149,7 +161,7 @@ export class RoomSummary {
}
}
afterSync(data) {
applyChanges(data) {
this._data = data;
}

View file

@ -2,6 +2,7 @@ import {EventKey} from "../EventKey.js";
import {EventEntry} from "../entries/EventEntry.js";
import {FragmentBoundaryEntry} from "../entries/FragmentBoundaryEntry.js";
import {createEventEntry} from "./common.js";
import {RoomMember, EVENT_TYPE as MEMBER_EVENT_TYPE} from "../../RoomMember.js";
// Synapse bug? where the m.room.create event appears twice in sync response
// when first syncing the room
@ -81,9 +82,76 @@ export class SyncWriter {
return {oldFragment, newFragment};
}
// NOTE(review): this method appears to be an abandoned draft. `memberData` is
// never defined in this scope (calling it would throw a ReferenceError), the
// computed `member` and the `txn` parameter are never used, and nothing is
// persisted. Member events are actually handled by _writeStateEvent below —
// this method looks safe to delete once confirmed it has no callers.
async _writeMember(event, txn) {
    if (!event) {
        return;
    }
    const userId = event.state_key;
    const {content} = event;
    if (!userId || !content) {
        return;
    }
    let member;
    if (memberData) {
        member = new RoomMember(memberData);
        member.updateWithMemberEvent(event);
    } else {
        member = RoomMember.fromMemberEvent(this._roomId, event);
    }
}
async _writeStateEvent(event, txn) {
if (event.type === MEMBER_EVENT_TYPE) {
const userId = event && event.state_key;
if (userId) {
const memberData = await txn.roomMembers.get(this._roomId, userId);
const member = updateOrCreateMember(this._roomId, memberData, event);
if (member) {
txn.roomMembers.set(member.serialize());
}
}
} else {
txn.roomState.set(this._roomId, event);
}
}
async _writeStateEvents(roomResponse, txn) {
// persist state
const {state, timeline} = roomResponse;
if (state.events) {
for (const event of state.events) {
await this._writeStateEvent(event, txn);
}
}
// persist live state events in timeline
if (timeline.events) {
for (const event of timeline.events) {
if (typeof event.state_key === "string") {
this._writeStateEvent(event, txn);
}
}
}
}
_writeTimeline(entries, timeline, currentKey, txn) {
if (timeline.events) {
const events = deduplicateEvents(timeline.events);
for(const event of events) {
currentKey = currentKey.nextKey();
const entry = createEventEntry(currentKey, this._roomId, event);
txn.timelineEvents.insert(entry);
entries.push(new EventEntry(entry, this._fragmentIdComparer));
}
}
return currentKey;
}
async writeSync(roomResponse, txn) {
const entries = [];
const timeline = roomResponse.timeline;
const {timeline} = roomResponse;
let currentKey = this._lastLiveKey;
if (!currentKey) {
// means we haven't synced this room yet (just joined or did initial sync)
@ -101,30 +169,10 @@ export class SyncWriter {
entries.push(FragmentBoundaryEntry.end(oldFragment, this._fragmentIdComparer));
entries.push(FragmentBoundaryEntry.start(newFragment, this._fragmentIdComparer));
}
if (timeline.events) {
const events = deduplicateEvents(timeline.events);
for(const event of events) {
currentKey = currentKey.nextKey();
const entry = createEventEntry(currentKey, this._roomId, event);
txn.timelineEvents.insert(entry);
entries.push(new EventEntry(entry, this._fragmentIdComparer));
}
}
// persist state
const state = roomResponse.state;
if (state.events) {
for (const event of state.events) {
txn.roomState.setStateEvent(this._roomId, event);
}
}
// persist live state events in timeline
if (timeline.events) {
for (const event of timeline.events) {
if (typeof event.state_key === "string") {
txn.roomState.setStateEvent(this._roomId, event);
}
}
}
await this._writeStateEvents(roomResponse, txn);
currentKey = this._writeTimeline(entries, timeline, currentKey, txn);
return {entries, newLiveKey: currentKey};
}

View file

@ -2,6 +2,7 @@ export const STORE_NAMES = Object.freeze([
"session",
"roomState",
"roomSummary",
"roomMembers",
"timelineEvents",
"timelineFragments",
"pendingEvents",

View file

@ -1,9 +1,10 @@
import {Storage} from "./Storage.js";
import { openDatabase, reqAsPromise } from "./utils.js";
import { exportSession, importSession } from "./export.js";
import { schema } from "./schema.js";
const sessionName = sessionId => `brawl_session_${sessionId}`;
const openDatabaseWithSessionId = sessionId => openDatabase(sessionName(sessionId), createStores, 1);
const openDatabaseWithSessionId = sessionId => openDatabase(sessionName(sessionId), createStores, schema.length);
export class StorageFactory {
async create(sessionId) {
@ -28,26 +29,10 @@ export class StorageFactory {
}
}
function createStores(db) {
db.createObjectStore("session", {keyPath: "key"});
// any way to make keys unique here? (just use put?)
db.createObjectStore("roomSummary", {keyPath: "roomId"});
async function createStores(db, txn, oldVersion, version) {
const startIdx = oldVersion || 0;
// need index to find live fragment? prooobably ok without for now
//key = room_id | fragment_id
db.createObjectStore("timelineFragments", {keyPath: "key"});
//key = room_id | fragment_id | event_index
const timelineEvents = db.createObjectStore("timelineEvents", {keyPath: "key"});
//eventIdKey = room_id | event_id
timelineEvents.createIndex("byEventId", "eventIdKey", {unique: true});
//key = room_id | event.type | event.state_key,
db.createObjectStore("roomState", {keyPath: "key"});
db.createObjectStore("pendingEvents", {keyPath: "key"});
// const roomMembers = db.createObjectStore("roomMembers", {keyPath: [
// "event.room_id",
// "event.content.membership",
// "event.state_key"
// ]});
// roomMembers.createIndex("byName", ["room_id", "content.name"]);
for(let i = startIdx; i < version; ++i) {
await schema[i](db, txn);
}
}

View file

@ -5,6 +5,7 @@ import {SessionStore} from "./stores/SessionStore.js";
import {RoomSummaryStore} from "./stores/RoomSummaryStore.js";
import {TimelineEventStore} from "./stores/TimelineEventStore.js";
import {RoomStateStore} from "./stores/RoomStateStore.js";
import {RoomMemberStore} from "./stores/RoomMemberStore.js";
import {TimelineFragmentStore} from "./stores/TimelineFragmentStore.js";
import {PendingEventStore} from "./stores/PendingEventStore.js";
@ -56,6 +57,10 @@ export class Transaction {
return this._store("roomState", idbStore => new RoomStateStore(idbStore));
}
get roomMembers() {
return this._store("roomMembers", idbStore => new RoomMemberStore(idbStore));
}
get pendingEvents() {
return this._store("pendingEvents", idbStore => new PendingEventStore(idbStore));
}

View file

@ -0,0 +1,65 @@
import {iterateCursor} from "./utils.js";
import {RoomMember, EVENT_TYPE as MEMBER_EVENT_TYPE} from "../../room/RoomMember.js";
// FUNCTIONS SHOULD ONLY BE APPENDED!!
// the index in the array is the database version: schema.length is the current
// version, and on upgrade createStores runs schema[oldVersion..version-1] in
// order inside the upgrade transaction (see StorageFactory.js)
export const schema = [
    createInitialStores,
    createMemberStore,
];
// TODO: how to deal with git merge conflicts of this array?
// TODO: decide how pure schema changes and existing-data migrations should be
// combined in a single step (the original note here trailed off)
// v1: the initial object stores
function createInitialStores(db) {
    db.createObjectStore("session", {keyPath: "key"});
    // any way to make keys unique here? (just use put?)
    db.createObjectStore("roomSummary", {keyPath: "roomId"});

    // need index to find live fragment? probably ok without for now
    // key = room_id | fragment_id
    db.createObjectStore("timelineFragments", {keyPath: "key"});
    // key = room_id | fragment_id | event_index
    const eventStore = db.createObjectStore("timelineEvents", {keyPath: "key"});
    // eventIdKey = room_id | event_id
    eventStore.createIndex("byEventId", "eventIdKey", {unique: true});

    // key = room_id | event.type | event.state_key
    db.createObjectStore("roomState", {keyPath: "key"});
    db.createObjectStore("pendingEvents", {keyPath: "key"});
}
// v2: dedicated member store, plus migration of member state events out of roomState
async function createMemberStore(db, txn) {
    // composite key: one record per (room, user)
    const roomMembers = db.createObjectStore("roomMembers", {keyPath: [
        "roomId",
        "userId"
    ]});
    // migrate existing member state events over:
    // roomState entries have the shape {key, roomId, event} (see RoomStateStore.set)
    const roomState = txn.objectStore("roomState");
    await iterateCursor(roomState.openCursor(), entry => {
        if (entry.event.type === MEMBER_EVENT_TYPE) {
            // remove from roomState first, then write the distilled record
            roomState.delete(entry.key);
            const member = RoomMember.fromMemberEvent(entry.roomId, entry.event);
            // fromMemberEvent returns undefined for events without a state_key
            if (member) {
                roomMembers.add(member.serialize());
            }
        }
    });
}
// NOTE(review): design sketch only — not exported and never called; both
// parameters are unused. Keep or delete once the keyPath migration strategy
// (see the TODO in ../StorageFactory.js) is decided.
function migrateKeyPathToArray(db, isNew) {
    if (isNew) {
        // create the new stores with the final name
    } else {
        // create the new stores with a tmp name
        // migrate the data over
        // change the name
    }
    // maybe it is ok to just run all the migration steps?
    // it might be a bit slower to create a store twice ...
    // but at least the path of migration or creating a new store
    // will go through the same code
    //
    // might not even be slower, as this is all happening within one transaction
}

View file

@ -1,18 +0,0 @@
// no historical members for now
class MemberStore {
async getMember(roomId, userId) {
}
/* async getMemberAtSortKey(roomId, userId, sortKey) {
} */
// multiple members here? does it happen at same sort key?
async setMembers(roomId, members) {
}
async getSortedMembers(roomId, offset, amount) {
}
}

View file

@ -0,0 +1,24 @@
// no historical members for now

/**
 * Store for per-room member records, keyed on [roomId, userId]
 * (see the roomMembers store created in the schema).
 */
export class RoomMemberStore {
    constructor(roomMembersStore) {
        this._roomMembersStore = roomMembersStore;
    }

    /** Looks up the stored member record for a user in a room. */
    get(roomId, userId) {
        const compositeKey = [roomId, userId];
        return this._roomMembersStore.get(compositeKey);
    }

    /** Inserts or overwrites a serialized member record. */
    async set(member) {
        return this._roomMembersStore.put(member);
    }

    // TODO: getMemberAtSortKey(roomId, userId, sortKey) and
    // getSortedMembers(roomId, offset, amount) for sorted/historical queries
}

View file

@ -3,15 +3,15 @@ export class RoomStateStore {
this._roomStateStore = idbStore;
}
async getEvents(type) {
async getAllForType(type) {
}
async getEventsForKey(type, stateKey) {
async get(type, stateKey) {
}
async setStateEvent(roomId, event) {
async set(roomId, event) {
const key = `${roomId}|${event.type}|${event.state_key}`;
const entry = {roomId, event, key};
return this._roomStateStore.put(entry);

View file

@ -15,8 +15,9 @@ export function openDatabase(name, createObjectStore, version) {
const req = window.indexedDB.open(name, version);
req.onupgradeneeded = (ev) => {
const db = ev.target.result;
const txn = ev.target.transaction;
const oldVersion = ev.oldVersion;
createObjectStore(db, oldVersion, version);
createObjectStore(db, txn, oldVersion, version);
};
return reqAsPromise(req);
}
@ -52,7 +53,10 @@ export function iterateCursor(cursorRequest, processValue) {
resolve(false);
return; // end of results
}
const {done, jumpTo} = processValue(cursor.value, cursor.key);
const result = processValue(cursor.value, cursor.key);
const done = result && result.done;
const jumpTo = result && result.jumpTo;
if (done) {
resolve(true);
} else if(jumpTo) {