only load 50 olm sessions at once
parent 10b5614fd9
commit 4ca5ff9b9f

1 changed file with 13 additions and 0 deletions
@@ -29,6 +29,9 @@ function findFirstSessionId(sessionIds) {
 }
 
 const OTK_ALGORITHM = "signed_curve25519";
+// only encrypt this amount of olm messages at once otherwise we run out of wasm memory
+// with all the sessions loaded at the same time
+const MAX_BATCH_SIZE = 50;
 
 export class Encryption {
     constructor({account, olm, olmUtil, ownUserId, storage, now, pickleKey, senderKeyLock}) {
@@ -43,6 +46,16 @@ export class Encryption {
     }
 
     async encrypt(type, content, devices, hsApi) {
+        let messages = [];
+        for (let i = 0; i < devices.length ; i += MAX_BATCH_SIZE) {
+            const batchDevices = devices.slice(i, i + MAX_BATCH_SIZE);
+            const batchMessages = await this._encryptForMaxDevices(type, content, batchDevices, hsApi);
+            messages = messages.concat(batchMessages);
+        }
+        return messages;
+    }
+
+    async _encryptForMaxDevices(type, content, devices, hsApi) {
         // TODO: see if we can only hold some of the locks until after the /keys/claim call (if needed)
         // take a lock on all senderKeys so decryption and other calls to encrypt (should not happen)
         // don't modify the sessions at the same time
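The change above is plain sequential chunked processing: slice the device list into groups of at most MAX_BATCH_SIZE and await each group before starting the next, so only one batch worth of wasm-backed olm sessions is alive at a time. Below is a minimal standalone sketch of that pattern; the helper names (processInBatches, encryptBatch) and the dummy device list are illustrative only and not part of this codebase.

// Minimal sketch of the chunked-processing pattern used in the commit.
// `processInBatches` and `encryptBatch` are hypothetical helpers, not hydrogen APIs.
const MAX_BATCH_SIZE = 50;

async function processInBatches(items, batchSize, processBatch) {
    let results = [];
    for (let i = 0; i < items.length; i += batchSize) {
        // only this slice needs to be held in (wasm) memory at once
        const batch = items.slice(i, i + batchSize);
        const batchResults = await processBatch(batch);
        results = results.concat(batchResults);
    }
    return results;
}

// stand-in for the real per-batch encryption work
async function encryptBatch(devices) {
    return devices.map(device => ({device, encrypted: true}));
}

const devices = Array.from({length: 120}, (_, i) => `device-${i}`);
processInBatches(devices, MAX_BATCH_SIZE, encryptBatch)
    .then(messages => console.log(messages.length)); // logs 120, produced in 3 batches of at most 50

Because each batch is awaited before the next one starts, peak memory use scales with the batch size rather than with the total number of target devices, which is the point of the commit.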