hydrogen-web/src/matrix/storage/idb/schema.ts

import {IDOMStorage} from "./types";
import {ITransaction} from "./QueryTarget";
import {iterateCursor, NOT_DONE, reqAsPromise} from "./utils";
import {RoomMember, EVENT_TYPE as MEMBER_EVENT_TYPE} from "../../room/members/RoomMember.js";
import {addRoomToIdentity} from "../../e2ee/DeviceTracker.js";
import {SESSION_E2EE_KEY_PREFIX} from "../../e2ee/common.js";
import {SummaryData} from "../../room/RoomSummary";
import {RoomMemberStore, MemberData} from "./stores/RoomMemberStore";
import {RoomStateEntry} from "./stores/RoomStateStore";
import {SessionStore} from "./stores/SessionStore";
import {Store} from "./Store";
import {encodeScopeTypeKey} from "./stores/OperationStore";
import {MAX_UNICODE} from "./stores/common";
import {LogItem} from "../../../logging/LogItem.js";
export type MigrationFunc = (db: IDBDatabase, txn: IDBTransaction, localStorage: IDOMStorage, log: LogItem) => Promise<void> | void;

// FUNCTIONS SHOULD ONLY BE APPENDED!!
// the index in the array is the database version
export const schema: MigrationFunc[] = [
    createInitialStores,
    createMemberStore,
    migrateSession,
    createE2EEStores,
    migrateEncryptionFlag,
    createAccountDataStore,
    createInviteStore,
    createArchivedRoomSummaryStore,
    migrateOperationScopeIndex,
    createTimelineRelationsStore,
    fixMissingRoomsInUserIdentities,
    changeSSSSKeyPrefix,
    backupAndRestoreE2EEAccountToLocalStorage
];
// TODO: how to deal with git merge conflicts of this array?
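
// Illustrative sketch (not part of the original file): roughly how a migration
// runner consumes this array. The array length is the target database version,
// and on a version change only the steps between the old and the new version
// run, all inside the single upgrade transaction. The function name and wiring
// below are hypothetical.
async function runMigrationsExample(ev: IDBVersionChangeEvent, localStorage: IDOMStorage, log: LogItem): Promise<void> {
    const req = ev.target as IDBOpenDBRequest;
    const db = req.result;
    const txn = req.transaction!; // the "versionchange" transaction
    for (let version = ev.oldVersion; version < schema.length; version += 1) {
        await schema[version](db, txn, localStorage, log);
    }
}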
// TypeScript note: for now, do not bother introducing interfaces / alias
// for old schemas. Just take them as `any`.

// how do we deal with schema updates vs existing data migration in a way that
//v1
function createInitialStores(db: IDBDatabase): void {
    db.createObjectStore("session", {keyPath: "key"});
    // any way to make keys unique here? (just use put?)
    db.createObjectStore("roomSummary", {keyPath: "roomId"});
    // need index to find live fragment? prooobably ok without for now
    //key = room_id | fragment_id
    db.createObjectStore("timelineFragments", {keyPath: "key"});
    //key = room_id | fragment_id | event_index
    const timelineEvents = db.createObjectStore("timelineEvents", {keyPath: "key"});
    //eventIdKey = room_id | event_id
    timelineEvents.createIndex("byEventId", "eventIdKey", {unique: true});
    //key = room_id | event.type | event.state_key,
    db.createObjectStore("roomState", {keyPath: "key"});
    db.createObjectStore("pendingEvents", {keyPath: "key"});
}
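// Illustrative sketch (not part of the original migrations): the "a | b | c"
// comments above describe composite string keys. The real encoding lives in the
// store classes; the point of a helper like the hypothetical one below is that
// numeric parts need a fixed width, otherwise "10" would sort before "2" under
// IndexedDB's lexicographic string ordering.
function encodeTimelineKeyExample(roomId: string, fragmentId: number, eventIndex: number): string {
    const fixedWidth = (n: number) => n.toString().padStart(10, "0");
    return `${roomId}|${fixedWidth(fragmentId)}|${fixedWidth(eventIndex)}`;
}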
//v2
async function createMemberStore(db: IDBDatabase, txn: IDBTransaction): Promise<void> {
    // Cast ok here because only "set" is used
    const roomMembers = new RoomMemberStore(db.createObjectStore("roomMembers", {keyPath: "key"}) as any);
    // migrate existing member state events over
    const roomState = txn.objectStore("roomState");
    await iterateCursor<RoomStateEntry>(roomState.openCursor(), entry => {
        if (entry.event.type === MEMBER_EVENT_TYPE) {
            roomState.delete(entry.key);
            const member = RoomMember.fromMemberEvent(entry.roomId, entry.event);
            if (member) {
                roomMembers.set(member.serialize());
            }
        }
        return NOT_DONE;
    });
}
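// Illustrative sketch (hypothetical, not the real ./utils implementation): the
// contract the migrations rely on is that iterateCursor walks an IDBCursor,
// calls the callback for every record, and keeps going while the callback
// returns NOT_DONE. Roughly:
function iterateCursorExample<T>(
    cursorRequest: IDBRequest<IDBCursorWithValue | null>,
    processValue: (value: T, key: IDBValidKey, cursor: IDBCursorWithValue) => unknown
): Promise<void> {
    return new Promise((resolve, reject) => {
        cursorRequest.onerror = () => reject(cursorRequest.error);
        cursorRequest.onsuccess = () => {
            const cursor = cursorRequest.result;
            if (!cursor) {
                resolve(); // ran out of records
                return;
            }
            if (processValue(cursor.value as T, cursor.primaryKey, cursor) === NOT_DONE) {
                cursor.continue(); // keep iterating
            } else {
                resolve(); // callback signalled it is done early
            }
        };
    });
}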
//v3
async function migrateSession(db: IDBDatabase, txn: IDBTransaction, localStorage: IDOMStorage): Promise<void> {
    const session = txn.objectStore("session");
    try {
        const PRE_MIGRATION_KEY = 1;
        const entry = await reqAsPromise(session.get(PRE_MIGRATION_KEY));
        if (entry) {
            session.delete(PRE_MIGRATION_KEY);
            const {syncToken, syncFilterId, serverVersions} = entry.value;
            // Cast ok here because only "set" is used and we don't look into return
            const store = new SessionStore(session as any, localStorage);
            store.set("sync", {token: syncToken, filterId: syncFilterId});
            store.set("serverVersions", serverVersions);
        }
    } catch (err) {
        txn.abort();
        console.error("could not migrate session", err.stack);
    }
}
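// Illustrative sketch (hypothetical, not the real ./utils implementation):
// reqAsPromise is used throughout these migrations to await a single
// IDBRequest; a minimal wrapper looks roughly like this.
function reqAsPromiseExample<T>(req: IDBRequest<T>): Promise<T> {
    return new Promise((resolve, reject) => {
        req.addEventListener("success", () => resolve(req.result));
        req.addEventListener("error", () => reject(req.error));
    });
}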
//v4
function createE2EEStores(db: IDBDatabase): void {
    db.createObjectStore("userIdentities", {keyPath: "userId"});
    const deviceIdentities = db.createObjectStore("deviceIdentities", {keyPath: "key"});
    deviceIdentities.createIndex("byCurve25519Key", "curve25519Key", {unique: true});
    db.createObjectStore("olmSessions", {keyPath: "key"});
    db.createObjectStore("inboundGroupSessions", {keyPath: "key"});
    db.createObjectStore("outboundGroupSessions", {keyPath: "roomId"});
    db.createObjectStore("groupSessionDecryptions", {keyPath: "key"});
    const operations = db.createObjectStore("operations", {keyPath: "id"});
    operations.createIndex("byTypeAndScope", "typeScopeKey", {unique: false});
}
// v5
async function migrateEncryptionFlag(db: IDBDatabase, txn: IDBTransaction): Promise<void> {
    // migrate room summary isEncrypted -> encryption prop
    const roomSummary = txn.objectStore("roomSummary");
    const roomState = txn.objectStore("roomState");
    const summaries: any[] = [];
    await iterateCursor<any>(roomSummary.openCursor(), summary => {
        summaries.push(summary);
        return NOT_DONE;
    });
    for (const summary of summaries) {
        const encryptionEntry = await reqAsPromise(roomState.get(`${summary.roomId}|m.room.encryption|`));
        if (encryptionEntry) {
            summary.encryption = encryptionEntry?.event?.content;
            delete summary.isEncrypted;
            roomSummary.put(summary);
        }
    }
}
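// Illustrative note (not part of the original migration): the lookup key above
// follows the roomState key scheme from createInitialStores,
// room_id | event.type | event.state_key, where the trailing "|" is the empty
// state_key of an m.room.encryption event. A hypothetical encoder:
function encodeRoomStateKeyExample(roomId: string, eventType: string, stateKey: string): string {
    return `${roomId}|${eventType}|${stateKey}`;
}
// e.g. encodeRoomStateKeyExample("!abc:example.org", "m.room.encryption", "")
//      === "!abc:example.org|m.room.encryption|"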
// v6
function createAccountDataStore(db: IDBDatabase): void {
    db.createObjectStore("accountData", {keyPath: "type"});
}
// v7
function createInviteStore(db: IDBDatabase): void {
    db.createObjectStore("invites", {keyPath: "roomId"});
}
// v8
function createArchivedRoomSummaryStore(db: IDBDatabase): void {
    db.createObjectStore("archivedRoomSummary", {keyPath: "summary.roomId"});
}
// v9
async function migrateOperationScopeIndex(db: IDBDatabase, txn: IDBTransaction): Promise<void> {
    try {
        const operations = txn.objectStore("operations");
        operations.deleteIndex("byTypeAndScope");
        await iterateCursor<any>(operations.openCursor(), (op, key, cur) => {
            const {typeScopeKey} = op;
            delete op.typeScopeKey;
            const [type, scope] = typeScopeKey.split("|");
            op.scopeTypeKey = encodeScopeTypeKey(scope, type);
            cur.update(op);
            return NOT_DONE;
        });
        operations.createIndex("byScopeAndType", "scopeTypeKey", {unique: false});
    } catch (err) {
        txn.abort();
        console.error("could not migrate operations", err.stack);
    }
}
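// Illustrative sketch (hypothetical, not part of the original migration): the
// reason for flipping type|scope to scope|type is that all operations for one
// scope can then be read with a single range on the new index. Assuming
// encodeScopeTypeKey produces keys that sort by scope first (which is what the
// index rename suggests), such a query could look like:
function getOperationsForScopeExample(operations: IDBObjectStore, scope: string): IDBRequest<any[]> {
    // "" and MAX_UNICODE as the smallest/largest type are assumptions for this sketch
    const range = IDBKeyRange.bound(encodeScopeTypeKey(scope, ""), encodeScopeTypeKey(scope, MAX_UNICODE));
    return operations.index("byScopeAndType").getAll(range);
}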
//v10
function createTimelineRelationsStore(db: IDBDatabase): void {
    db.createObjectStore("timelineRelations", {keyPath: "key"});
}
//v11 doesn't change the schema, but ensures all userIdentities have all the roomIds they should (see #470)
async function fixMissingRoomsInUserIdentities(db: IDBDatabase, txn: IDBTransaction, localStorage: IDOMStorage, log: LogItem) {
    const roomSummaryStore = txn.objectStore("roomSummary");
    const trackedRoomIds: string[] = [];
    await iterateCursor<SummaryData>(roomSummaryStore.openCursor(), roomSummary => {
        if (roomSummary.isTrackingMembers) {
            trackedRoomIds.push(roomSummary.roomId);
        }
        return NOT_DONE;
    });
    const outboundGroupSessionsStore = txn.objectStore("outboundGroupSessions");
    const userIdentitiesStore: IDBObjectStore = txn.objectStore("userIdentities");
    const roomMemberStore = txn.objectStore("roomMembers");
    for (const roomId of trackedRoomIds) {
        let foundMissing = false;
        const joinedUserIds: string[] = [];
        const memberRange = IDBKeyRange.bound(roomId, `${roomId}|${MAX_UNICODE}`, true, true);
        await log.wrap({l: "room", id: roomId}, async log => {
            await iterateCursor<MemberData>(roomMemberStore.openCursor(memberRange), member => {
                if (member.membership === "join") {
                    joinedUserIds.push(member.userId);
                }
                return NOT_DONE;
            });
            log.set("joinedUserIds", joinedUserIds.length);
            for (const userId of joinedUserIds) {
                const identity = await reqAsPromise(userIdentitiesStore.get(userId));
                const originalRoomCount = identity?.roomIds?.length;
                const updatedIdentity = addRoomToIdentity(identity, userId, roomId);
                if (updatedIdentity) {
                    log.log({l: `fixing up`, id: userId,
                        roomsBefore: originalRoomCount, roomsAfter: updatedIdentity.roomIds.length});
                    userIdentitiesStore.put(updatedIdentity);
                    foundMissing = true;
                }
            }
            log.set("foundMissing", foundMissing);
            if (foundMissing) {
                // clear outbound megolm session,
                // so we'll create a new one on the next message that will be properly shared
                outboundGroupSessionsStore.delete(roomId);
            }
        });
    }
}
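// Illustrative note (not part of the original migration): the memberRange above
// relies on roomMembers keys having the shape room_id | user_id, so an exclusive
// bound from roomId to roomId|MAX_UNICODE matches every member of that room and
// nothing else. The same idiom as a hypothetical helper:
function roomMemberRangeExample(roomId: string): IDBKeyRange {
    return IDBKeyRange.bound(roomId, `${roomId}|${MAX_UNICODE}`, true, true);
}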
// v12 move ssssKey to e2ee:ssssKey so it will get backed up in the next step
async function changeSSSSKeyPrefix(db: IDBDatabase, txn: IDBTransaction) {
    const session = txn.objectStore("session");
    const ssssKey = await reqAsPromise(session.get("ssssKey"));
    if (ssssKey) {
        // session records have the shape {key, value}; keep the stored value
        // as-is, only the key gets the e2ee: prefix
        session.put({key: `${SESSION_E2EE_KEY_PREFIX}ssssKey`, value: ssssKey.value});
    }
}
// v13
async function backupAndRestoreE2EEAccountToLocalStorage(db: IDBDatabase, txn: IDBTransaction, localStorage: IDOMStorage, log: LogItem) {
    const session = txn.objectStore("session");
    // the Store object gets passed in several things through the Transaction class (a wrapper around IDBTransaction),
    // the only thing we should need here is the databaseName though, so we mock it out.
    // ideally we should have an easier way to go from the idb primitive layer to the specific store classes where
    // we implement logic, but for now we need this.
    const databaseNameHelper: ITransaction = {
        databaseName: db.name,
        get idbFactory(): IDBFactory { throw new Error("unused"); },
        get IDBKeyRange(): typeof IDBKeyRange { throw new Error("unused"); },
        addWriteError() {},
    };
    const sessionStore = new SessionStore(new Store(session, databaseNameHelper), localStorage);
    // if we already have an e2ee identity, write a backup to local storage.
    // further updates to e2ee keys in the session store will also write to local storage from 0.2.15 on,
    // but here we make sure a backup is immediately created after installing the update and we don't wait until
    // the olm account needs to change
    sessionStore.writeE2EEIdentityToLocalStorage();
    // and if we already have a backup, restore it now for any missing key in idb.
    // this will restore the backup every time the idb database is dropped as it will
    // run through all the migration steps when recreating it.
    const restored = await sessionStore.tryRestoreE2EEIdentityFromLocalStorage(log);
    log.set("restored", restored);
}