forked from mystiq/hydrogen-web
log login & loading a session
commit 57bb75e864 (parent ef1e867dee)
5 changed files with 116 additions and 92 deletions
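The diff threads a log item through login, session loading and key upload. The calls it relies on (logger.run/runDetached at entry points, log.wrap/wrapDetached for child operations, log.set for values, log.catch for errors) all appear in the hunks below. The following is only a minimal stand-in sketch to make those call sites easier to read; it is not hydrogen's actual logger implementation, and it models nothing beyond the methods visible in this commit:

// Minimal stand-in for the logging surface used in this commit (assumption-based
// sketch, not hydrogen's ILogger). Only run, runDetached, wrap, wrapDetached,
// set and catch are modeled, because only those appear in the diff below.
class LogItem {
    constructor(label) {
        this.label = label;
        this.values = {};
        this.children = [];
        this.error = null;
    }
    set(key, value) { this.values[key] = value; }
    catch(err) { this.error = err; }
    wrap(label, callback) {
        const child = new LogItem(label);
        this.children.push(child);
        // returns whatever the callback returns, typically a promise the caller awaits
        return callback(child);
    }
    wrapDetached(label, callback) {
        const child = new LogItem(label);
        this.children.push(child);
        // like wrap, but nobody awaits the result; errors end up on the child item
        Promise.resolve().then(() => callback(child)).catch(err => child.catch(err));
    }
}

class Logger {
    run(label, callback) {
        // opens a root log item for a user-triggered operation
        const root = new LogItem(label);
        return Promise.resolve().then(() => callback(root)).catch(err => root.catch(err));
    }
    runDetached(label, callback) {
        // same as run in this sketch; the real logger reports detached operations separately
        return this.run(label, callback);
    }
}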
@@ -96,7 +96,6 @@ export class Session {
     // called once this._e2eeAccount is assigned
     _setupEncryption() {
-        console.log("loaded e2ee account with keys", this._e2eeAccount.identityKeys);
         // TODO: this should all go in a wrapper in e2ee/ that is bootstrapped by passing in the account
         // and can create RoomEncryption objects and handle encrypted to_device messages and device list changes.
         const senderKeyLock = new LockMap();
@@ -228,7 +227,7 @@ export class Session {
     }

     /** @internal */
-    async createIdentity() {
+    async createIdentity(log) {
         if (this._olm) {
             if (!this._e2eeAccount) {
                 this._e2eeAccount = await E2EEAccount.create({
@@ -240,15 +239,16 @@ export class Session {
                     olmWorker: this._olmWorker,
                     storage: this._storage,
                 });
+                log.set("keys", this._e2eeAccount.identityKeys);
                 this._setupEncryption();
             }
-            await this._e2eeAccount.generateOTKsIfNeeded(this._storage);
-            await this._e2eeAccount.uploadKeys(this._storage);
+            await this._e2eeAccount.generateOTKsIfNeeded(this._storage, log);
+            await log.wrap("uploadKeys", log => this._e2eeAccount.uploadKeys(this._storage, log));
         }
     }

     /** @internal */
-    async load() {
+    async load(log) {
         const txn = this._storage.readTxn([
             this._storage.storeNames.session,
             this._storage.storeNames.roomSummary,
@@ -271,6 +271,7 @@ export class Session {
                 txn
             });
             if (this._e2eeAccount) {
+                log.set("keys", this._e2eeAccount.identityKeys);
                 this._setupEncryption();
             }
         }
@@ -279,7 +280,7 @@ export class Session {
         const rooms = await txn.roomSummary.getAll();
         await Promise.all(rooms.map(summary => {
             const room = this.createRoom(summary.roomId, pendingEventsByRoomId.get(summary.roomId));
-            return room.load(summary, txn);
+            return log.wrap("room", log => room.load(summary, txn, log));
         }));
     }

@@ -297,7 +298,7 @@ export class Session {
      * and useful to store so we can later tell what capabilities
      * our homeserver has.
      */
-    async start(lastVersionResponse) {
+    async start(lastVersionResponse, log) {
         if (lastVersionResponse) {
             // store /versions response
             const txn = this._storage.readWriteTxn([
@@ -334,7 +335,7 @@ export class Session {
             if (roomOperations) {
                 roomOperationsByType = groupBy(roomOperations, r => r.type);
             }
-            room.start(roomOperationsByType);
+            room.start(roomOperationsByType, log);
         }
     }

@@ -438,7 +439,7 @@ export class Session {
                 // are about to receive messages for
                 // (https://github.com/vector-im/riot-web/issues/2782).
                 if (!isCatchupSync) {
-                    const needsToUploadOTKs = await this._e2eeAccount.generateOTKsIfNeeded(this._storage);
+                    const needsToUploadOTKs = await this._e2eeAccount.generateOTKsIfNeeded(this._storage, log);
                     if (needsToUploadOTKs) {
                         promises.push(log.wrap("uploadKeys", log => this._e2eeAccount.uploadKeys(this._storage, log)));
                     }
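In the Session hunks above, Session.load maps each room summary to log.wrap("room", …) inside Promise.all, which only works because wrap returns whatever the callback returns (here a promise). A small self-contained check against the stand-in LogItem from the sketch at the top; the room ids and the loadRoom helper are illustrative, not hydrogen code:

const sessionLog = new LogItem("load session");
const roomIds = ["!room1:example.org", "!room2:example.org"];   // illustrative ids

async function loadRoom(roomId, log) {                          // illustrative stand-in for room.load
    log.set("id", roomId);
    return roomId;
}

// mirrors the Promise.all(rooms.map(...)) pattern above: each room gets its own
// child item, and the awaited value is whatever the wrapped callback returned
Promise.all(roomIds.map(roomId => sessionLog.wrap("room", log => loadRoom(roomId, log))))
    .then(loaded => console.log("loaded", loaded, "with", sessionLog.children.length, "child items"));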
@@ -73,29 +73,35 @@ export class SessionContainer {
             return;
         }
         this._status.set(LoadStatus.Loading);
+        this._platform.logger.run("load session", async log => {
+            log.set("id", sessionId);
             try {
                 const sessionInfo = await this._platform.sessionInfoStorage.get(sessionId);
                 if (!sessionInfo) {
                     throw new Error("Invalid session id: " + sessionId);
                 }
-                await this._loadSessionInfo(sessionInfo, false);
+                await this._loadSessionInfo(sessionInfo, false, log);
+                log.set("status", this._status.get());
             } catch (err) {
+                log.catch(err);
                 this._error = err;
                 this._status.set(LoadStatus.Error);
             }
+        });
     }

     async startWithLogin(homeServer, username, password) {
         if (this._status.get() !== LoadStatus.NotLoading) {
             return;
         }
+        this._platform.logger.run("login", async log => {
         this._status.set(LoadStatus.Login);
         const clock = this._platform.clock;
         let sessionInfo;
         try {
             const request = this._platform.request;
             const hsApi = new HomeServerApi({homeServer, request, createTimeout: clock.createTimeout});
-            const loginData = await hsApi.passwordLogin(username, password, "Hydrogen").response();
+            const loginData = await hsApi.passwordLogin(username, password, "Hydrogen", {log}).response();
             const sessionId = this.createNewSessionId();
             sessionInfo = {
                 id: sessionId,
@@ -105,6 +111,7 @@ export class SessionContainer {
                 accessToken: loginData.access_token,
                 lastUsed: clock.now()
             };
+            log.set("id", sessionId);
             await this._platform.sessionInfoStorage.add(sessionInfo);
         } catch (err) {
             this._error = err;
@@ -114,6 +121,7 @@ export class SessionContainer {
             } else {
                 this._loginFailure = LoginFailure.Unknown;
             }
+            log.set("loginFailure", this._loginFailure);
             this._status.set(LoadStatus.LoginFailed);
         } else if (err instanceof ConnectionError) {
             this._loginFailure = LoginFailure.Connection;
@@ -127,14 +135,17 @@ export class SessionContainer {
         // LoadStatus.Error in case of an error,
         // so separate try/catch
         try {
-            await this._loadSessionInfo(sessionInfo, true);
+            await this._loadSessionInfo(sessionInfo, true, log);
+            log.set("status", this._status.get());
         } catch (err) {
+            log.catch(err);
             this._error = err;
             this._status.set(LoadStatus.Error);
         }
+        });
     }

-    async _loadSessionInfo(sessionInfo, isNewLogin) {
+    async _loadSessionInfo(sessionInfo, isNewLogin, log) {
         const clock = this._platform.clock;
         this._sessionStartedByReconnector = false;
         this._status.set(LoadStatus.Loading);
@@ -178,24 +189,26 @@ export class SessionContainer {
             mediaRepository,
             platform: this._platform,
         });
-        await this._session.load();
+        await this._session.load(log);
         if (isNewLogin) {
             this._status.set(LoadStatus.SessionSetup);
-            await this._session.createIdentity();
+            await log.wrap("createIdentity", log => this._session.createIdentity(log));
         }

         this._sync = new Sync({hsApi: this._requestScheduler.hsApi, storage: this._storage, session: this._session, logger: this._platform.logger});
         // notify sync and session when back online
         this._reconnectSubscription = this._reconnector.connectionStatus.subscribe(state => {
             if (state === ConnectionStatus.Online) {
+                this._platform.logger.runDetached("reconnect", async log => {
                 // needs to happen before sync and session or it would abort all requests
                 this._requestScheduler.start();
                 this._sync.start();
                 this._sessionStartedByReconnector = true;
-                this._session.start(this._reconnector.lastVersionsResponse);
+                    await log.wrap("session start", log => this._session.start(this._reconnector.lastVersionsResponse, log));
+                });
             }
         });
-        await this._waitForFirstSync();
+        await log.wrap("wait first sync", log => this._waitForFirstSync(log));

         this._status.set(LoadStatus.Ready);

@@ -204,12 +217,13 @@ export class SessionContainer {
         // started to session, so check first
         // to prevent an extra /versions request
         if (!this._sessionStartedByReconnector) {
-            const lastVersionsResponse = await hsApi.versions({timeout: 10000}).response();
-            this._session.start(lastVersionsResponse);
+            const lastVersionsResponse = await hsApi.versions({timeout: 10000, log}).response();
+            // log as ref as we don't want to await it
+            await log.wrap("session start", log => this._session.start(lastVersionsResponse, log));
         }
     }

-    async _waitForFirstSync() {
+    async _waitForFirstSync(log) {
         try {
             this._sync.start();
             this._status.set(LoadStatus.FirstSync);
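In the SessionContainer hunks above, logger.run opens the root item for the user-triggered flows ("login", "load session"), log.wrap nests the awaited steps ("createIdentity", "session start", "wait first sync"), and runDetached covers the reconnect path that nobody awaits. A usage sketch against the stand-in Logger from the first example; the labels and values here are illustrative, not taken from hydrogen:

const logger = new Logger();

logger.run("login", async log => {
    log.set("id", "session-1");                         // illustrative session id
    try {
        await log.wrap("createIdentity", async log => {
            log.set("keys", {ed25519: "..."});          // illustrative value
        });
        log.set("status", "Ready");
    } catch (err) {
        log.catch(err);                                 // same shape as the catch blocks above
    }
});

// fire-and-forget work, like the reconnect path above, goes through runDetached
logger.runDetached("reconnect", async log => {
    log.set("online", true);
});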
@@ -80,22 +80,24 @@ export class Account {
         return this._identityKeys;
     }

-    async uploadKeys(storage) {
+    async uploadKeys(storage, log) {
         const oneTimeKeys = JSON.parse(this._account.one_time_keys());
         // only one algorithm supported by olm atm, so hardcode its name
         const oneTimeKeysEntries = Object.entries(oneTimeKeys.curve25519);
         if (oneTimeKeysEntries.length || !this._areDeviceKeysUploaded) {
             const payload = {};
             if (!this._areDeviceKeysUploaded) {
+                log.set("identity", true);
                 const identityKeys = JSON.parse(this._account.identity_keys());
                 payload.device_keys = this._deviceKeysPayload(identityKeys);
             }
             if (oneTimeKeysEntries.length) {
+                log.set("otks", true);
                 payload.one_time_keys = this._oneTimeKeysPayload(oneTimeKeysEntries);
             }
-            const response = await this._hsApi.uploadKeys(payload, /*{log}*/).response();
+            const response = await this._hsApi.uploadKeys(payload, {log}).response();
             this._serverOTKCount = response?.one_time_key_counts?.signed_curve25519;
-            // log.set("serverOTKCount", this._serverOTKCount);
+            log.set("serverOTKCount", this._serverOTKCount);
             // TODO: should we not modify this in the txn like we do elsewhere?
             // we'd have to pickle and unpickle the account to clone it though ...
             // and the upload has succeed at this point, so in-memory would be correct
@@ -114,7 +116,7 @@ export class Account {
         }
     }

-    async generateOTKsIfNeeded(storage) {
+    async generateOTKsIfNeeded(storage, log) {
         const maxOTKs = this._account.max_number_of_one_time_keys();
         const limit = maxOTKs / 2;
         if (this._serverOTKCount < limit) {
@@ -128,11 +130,16 @@ export class Account {
             if (totalOTKCount < limit) {
                 // we could in theory also generated the keys and store them in
                 // writeSync, but then we would have to clone the account to avoid side-effects.
-                await this._updateSessionStorage(storage, sessionStore => {
+                await log.wrap("generate otks", log => this._updateSessionStorage(storage, sessionStore => {
                     const newKeyCount = maxOTKs - totalOTKCount;
+                    log.set("max", maxOTKs);
+                    log.set("server", this._serverOTKCount);
+                    log.set("unpublished", unpublishedOTKCount);
+                    log.set("new", newKeyCount);
+                    log.set("limit", limit);
                     this._account.generate_one_time_keys(newKeyCount);
                     sessionStore.set(ACCOUNT_SESSION_KEY, this._account.pickle(this._pickleKey));
-                });
+                }));
                 return true;
             }
         }
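For the one-time-key numbers logged in generateOTKsIfNeeded above, and assuming totalOTKCount is the server count plus the unpublished count (that computation sits outside the shown hunk), the logged values relate roughly like this; the numbers are illustrative:

// Illustrative numbers; the real values come from the olm account and the /keys/upload response.
const maxOTKs = 100;                          // this._account.max_number_of_one_time_keys() -> "max"
const limit = maxOTKs / 2;                    // 50 -> "limit": replenish once below half
const serverOTKCount = 8;                     // -> "server"
const unpublishedOTKCount = 2;                // -> "unpublished"
const totalOTKCount = serverOTKCount + unpublishedOTKCount;   // assumed definition, not shown in the hunk
const newKeyCount = maxOTKs - totalOTKCount;  // 90 -> "new": generated and pickled in one go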
@@ -167,6 +167,7 @@ export class Room extends EventEmitter {
                 throw err;
             }
             await writeTxn.complete();
+            // TODO: log decryption errors here
             decryption.applyToEntries(entries);
             if (this._observedEvents) {
                 this._observedEvents.updateEvents(entries);
@@ -318,30 +319,29 @@ export class Room extends EventEmitter {
     async afterSyncCompleted(changes, log) {
         log.set("id", this.id);
         if (this._roomEncryption) {
-            // TODO: pass log to flushPendingRoomKeyShares once we also have a logger in `start`
-            await this._roomEncryption.flushPendingRoomKeyShares(this._hsApi, null);
+            await this._roomEncryption.flushPendingRoomKeyShares(this._hsApi, null, log);
         }
     }

     /** @package */
-    async start(pendingOperations) {
+    start(pendingOperations, parentLog) {
         if (this._roomEncryption) {
-            try {
             const roomKeyShares = pendingOperations?.get("share_room_key");
             if (roomKeyShares) {
                 // if we got interrupted last time sending keys to newly joined members
-                await this._roomEncryption.flushPendingRoomKeyShares(this._hsApi, roomKeyShares);
-            }
-            } catch (err) {
-                // we should not throw here
-                console.error(`could not send out (all) pending room keys for room ${this.id}`, err.stack);
+                parentLog.wrapDetached("flush room keys", log => {
+                    log.set("id", this.id);
+                    return this._roomEncryption.flushPendingRoomKeyShares(this._hsApi, roomKeyShares, log);
+                });
             }
         }
-        this._sendQueue.resumeSending();
+
+        this._sendQueue.resumeSending(parentLog);
     }

     /** @package */
-    async load(summary, txn) {
+    async load(summary, txn, log) {
+        log.set("id", this.id);
         try {
             this._summary.load(summary);
             if (this._summary.data.encryption) {
@@ -354,7 +354,7 @@ export class Room extends EventEmitter {
                 const changes = await this._heroes.calculateChanges(this._summary.data.heroes, [], txn);
                 this._heroes.applyChanges(changes, this._summary.data);
             }
-            return this._syncWriter.load(txn);
+            return this._syncWriter.load(txn, log);
         } catch (err) {
             throw new WrappedError(`Could not load room ${this._roomId}`, err);
         }
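The Room.start change above drops the local try/catch plus console.error in favour of a detached log item, which also lets start stop being async. With the stand-in LogItem from the first sketch, the new error handling is roughly equivalent to the following; the names below are illustrative stand-ins, not the real Room internals:

const parentLog = new LogItem("start rooms");          // illustrative parent item
const roomId = "!room1:example.org";                   // illustrative room id
async function flushPendingRoomKeyShares() {           // illustrative stand-in for the real e2ee call
    throw new Error("sending room keys failed");
}

// the error is recorded on the "flush room keys" child item instead of being
// caught locally and written to the console; the caller returns immediately
parentLog.wrapDetached("flush room keys", async log => {
    log.set("id", roomId);
    await flushPendingRoomKeyShares();
});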
@@ -41,7 +41,7 @@ export class SyncWriter {
         this._lastLiveKey = null;
     }

-    async load(txn) {
+    async load(txn, log) {
         const liveFragment = await txn.timelineFragments.liveFragment(this._roomId);
         if (liveFragment) {
             const [lastEvent] = await txn.timelineEvents.lastEvents(this._roomId, liveFragment.id, 1);
@@ -53,7 +53,9 @@ export class SyncWriter {
         }
         // if there is no live fragment, we don't create it here because load gets a readonly txn.
         // this is on purpose, load shouldn't modify the store
-        console.log("room persister load", this._roomId, this._lastLiveKey && this._lastLiveKey.toString());
+        if (this._lastLiveKey) {
+            log.set("live key", this._lastLiveKey.toString());
+        }
     }

     async _createLiveFragment(txn, previousToken) {