between syncs, generate more otks if needed, and upload them
parent 681dfdf62b
commit a1ba5d7dba

3 changed files with 41 additions and 0 deletions
@@ -60,6 +60,7 @@ export class Session {
                 }
                 await txn.complete();
             }
+            await this._e2eeAccount.generateOTKsIfNeeded(this._storage);
             await this._e2eeAccount.uploadKeys(this._storage);
         }
     }
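Note on the hunk above: this call site runs once, during initial session setup, so the ordering matters, since the upload should publish the freshly topped-up key pool. A minimal stand-alone sketch of that flow follows; the wrapper function and its parameters are illustrative assumptions, only generateOTKsIfNeeded and uploadKeys come from the diff.

// Illustrative sketch only, not Session code from this commit.
// Top up the local one-time key pool before publishing keys, so the
// upload includes a full set of OTKs rather than leftovers.
async function setUpAccountKeys(e2eeAccount, storage) {
    await e2eeAccount.generateOTKsIfNeeded(storage);
    await e2eeAccount.uploadKeys(storage);
}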
@@ -178,6 +179,15 @@ export class Session {
         }
     }
 
+    async afterSyncCompleted() {
+        const needsToUploadOTKs = await this._e2eeAccount.generateOTKsIfNeeded(this._storage);
+        if (needsToUploadOTKs) {
+            // TODO: we could do this in parallel with sync if it proves to be too slow,
+            // but I'm not sure how to avoid swallowing errors in that case
+            await this._e2eeAccount.uploadKeys(this._storage);
+        }
+    }
+
     get syncToken() {
         return this._syncInfo?.token;
     }
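The boolean returned by generateOTKsIfNeeded is what gates the extra network round-trip here. A small stand-alone sketch of that contract, using a stubbed account object (the stub and helper names are assumptions, not part of the commit):

// Illustrative sketch only: the same gating logic as afterSyncCompleted above.
async function afterSyncCompleted(account, storage) {
    const needsToUploadOTKs = await account.generateOTKsIfNeeded(storage);
    if (needsToUploadOTKs) {
        await account.uploadKeys(storage);
    }
}

// Stubbed usage: when no new keys were generated, no upload request is made.
const stubAccount = {
    async generateOTKsIfNeeded() { return false; },
    async uploadKeys() { throw new Error("unexpected upload"); },
};
afterSyncCompleted(stubAccount, null).then(() => console.log("nothing to upload"));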
@@ -100,6 +100,12 @@ export class Sync {
                     this._status.set(SyncStatus.Stopped);
                 }
             }
+            try {
+                await this._session.afterSyncCompleted();
+            } catch (err) {
+                console.error("error during after sync completed, continuing to sync.", err.stack);
+                // swallowing the error here, apart from logging it
+            }
         }
     }
 
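The catch block exists so a failed key upload cannot stop the sync loop; the error is only logged. A rough sketch of that behaviour in isolation (the loop shape and names below are assumptions for illustration, not the actual Sync class):

// Illustrative sketch only: keep syncing even if the post-sync step throws.
async function syncLoop(session, syncOnce, isSyncing) {
    while (isSyncing()) {
        await syncOnce();
        try {
            await session.afterSyncCompleted();
        } catch (err) {
            // log and continue; an OTK upload failure should not stop sync
            console.error("error during after sync completed, continuing to sync.", err.stack);
        }
    }
}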
@@ -95,6 +95,31 @@ export class Account {
         }
     }
 
+    async generateOTKsIfNeeded(storage) {
+        const maxOTKs = this._account.max_number_of_one_time_keys();
+        const limit = maxOTKs / 2;
+        if (this._serverOTKCount < limit) {
+            // TODO: cache unpublishedOTKCount, so we don't have to parse this JSON on every sync iteration;
+            // for now, we only determine it when serverOTKCount is sufficiently low, which it should rarely be,
+            // and recheck
+            const oneTimeKeys = JSON.parse(this._account.one_time_keys());
+            const oneTimeKeysEntries = Object.entries(oneTimeKeys.curve25519);
+            const unpublishedOTKCount = oneTimeKeysEntries.length;
+            const totalOTKCount = this._serverOTKCount + unpublishedOTKCount;
+            if (totalOTKCount < limit) {
+                // we could in theory also generate the keys and store them in
+                // writeSync, but then we would have to clone the account to avoid side-effects.
+                await this._updateSessionStorage(storage, sessionStore => {
+                    const newKeyCount = maxOTKs - totalOTKCount;
+                    this._account.generate_one_time_keys(newKeyCount);
+                    sessionStore.set(ACCOUNT_SESSION_KEY, this._account.pickle(this._pickleKey));
+                });
+                return true;
+            }
+        }
+        return false;
+    }
+
     writeSync(deviceOneTimeKeysCount, txn) {
         // we only upload signed_curve25519 otks
         const otkCount = deviceOneTimeKeysCount.signed_curve25519;
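To make the replenishment arithmetic in generateOTKsIfNeeded concrete, here is the same threshold check as a pure function with made-up numbers (the helper name and values are assumptions, not from the commit):

// Illustrative sketch only: how many new one-time keys would be generated.
function newOTKCount(maxOTKs, serverOTKCount, unpublishedOTKCount) {
    const limit = maxOTKs / 2;
    if (serverOTKCount >= limit) {
        return 0; // server still holds at least half the maximum, nothing to do
    }
    const totalOTKCount = serverOTKCount + unpublishedOTKCount;
    // only refill when even the unpublished keys don't bring us above the limit
    return totalOTKCount < limit ? maxOTKs - totalOTKCount : 0;
}

// e.g. max 100 keys, 30 left on the server, 5 generated but not yet uploaded:
// limit is 50, total is 35, so 65 keys are generated to refill the pool.
console.log(newOTKCount(100, 30, 5)); // 65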