From 7ce5cdfc4ae4840028af2a3ea96ea431920c71bd Mon Sep 17 00:00:00 2001
From: Bruno Windels <274386+bwindels@users.noreply.github.com>
Date: Thu, 22 Sep 2022 13:19:56 +0200
Subject: [PATCH] prevent concurrent calls of OlmEncryption.encrypt from OOMing
 the olm wasm heap

This is triggered by connecting to many call members at once while
encrypting the signalling messages. Doing so keeps many olm.Session
objects in memory at the same time, which makes olm run out of wasm
heap memory.
---
 src/matrix/e2ee/olm/Encryption.ts | 38 ++++++++++++++++++++++++++++---
 1 file changed, 35 insertions(+), 3 deletions(-)

diff --git a/src/matrix/e2ee/olm/Encryption.ts b/src/matrix/e2ee/olm/Encryption.ts
index dcc9f0b1..5fd1f25b 100644
--- a/src/matrix/e2ee/olm/Encryption.ts
+++ b/src/matrix/e2ee/olm/Encryption.ts
@@ -21,6 +21,7 @@ import {createSessionEntry} from "./Session";
 import type {OlmMessage, OlmPayload, OlmEncryptedMessageContent} from "./types";
 import type {Account} from "../Account";
 import type {LockMap} from "../../../utils/LockMap";
+import {Lock, MultiLock, ILock} from "../../../utils/Lock";
 import type {Storage} from "../../storage/idb/Storage";
 import type {Transaction} from "../../storage/idb/Transaction";
 import type {DeviceIdentity} from "../../storage/idb/stores/DeviceIdentityStore";
@@ -62,6 +63,9 @@ const OTK_ALGORITHM = "signed_curve25519";
 const MAX_BATCH_SIZE = 20;
 
 export class Encryption {
+
+    private _batchLocks: Array<Lock>;
+
     constructor(
         private readonly account: Account,
         private readonly pickleKey: string,
@@ -71,14 +75,42 @@ export class Encryption {
         private readonly ownUserId: string,
         private readonly olmUtil: Olm.Utility,
         private readonly senderKeyLock: LockMap<string>
-    ) {}
+    ) {
+        this._batchLocks = new Array(MAX_BATCH_SIZE);
+        for (let i = 0; i < MAX_BATCH_SIZE; i += 1) {
+            this._batchLocks[i] = new Lock();
+        }
+    }
+
+    /** A hack to prevent olm OOMing when `encrypt` is called several times concurrently,
+     * which is the case when encrypting voip signalling messages to send over to_device.
+     * A better fix would be to extract the common bits from megolm/KeyLoader into a super class
+     * and have some sort of olm/SessionLoader that is shared between encryption and decryption
+     * and only keeps the olm session in wasm memory for a brief moment, like we already do for RoomKeys,
+     * and get the benefit of an optimal cache at the same time.
+     * */
+    private async _takeBatchLock(amount: number): Promise<ILock> {
+        const locks = this._batchLocks.filter(l => !l.isTaken).slice(0, amount);
+        if (locks.length < amount) {
+            const takenLocks = this._batchLocks.filter(l => l.isTaken).slice(0, amount - locks.length);
+            locks.push(...takenLocks);
+        }
+        await Promise.all(locks.map(l => l.take()));
+        return new MultiLock(locks);
+    }
 
     async encrypt(type: string, content: Record<string, any>, devices: DeviceIdentity[], hsApi: HomeServerApi, log: ILogItem): Promise<EncryptedMessage[]> {
         let messages: EncryptedMessage[] = [];
         for (let i = 0; i < devices.length ; i += MAX_BATCH_SIZE) {
             const batchDevices = devices.slice(i, i + MAX_BATCH_SIZE);
-            const batchMessages = await this._encryptForMaxDevices(type, content, batchDevices, hsApi, log);
-            messages = messages.concat(batchMessages);
+            const batchLock = await this._takeBatchLock(batchDevices.length);
+            try {
+                const batchMessages = await this._encryptForMaxDevices(type, content, batchDevices, hsApi, log);
+                messages = messages.concat(batchMessages);
+            }
+            finally {
+                batchLock.release();
+            }
         }
         return messages;
     }
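
For context, the patch serializes batches of olm work through the Lock and MultiLock utilities imported above from src/utils/Lock.ts. Below is a minimal sketch of how those primitives could behave, assuming take() waits until the current holder releases, isTaken reports whether the lock is currently held, and MultiLock releases a group of locks together; the actual classes in the repo may be implemented differently.

// Hypothetical sketch of the Lock/MultiLock primitives this patch relies on;
// the real implementations live in src/utils/Lock.ts and may differ.

export interface ILock {
    release(): void;
}

export class Lock implements ILock {
    private _promise?: Promise<void>;
    private _resolve?: () => void;

    // Resolves once this caller has acquired the lock, first waiting for the
    // current holder (if any) to release it.
    async take(): Promise<void> {
        while (this._promise) {
            await this._promise;
        }
        this._promise = new Promise<void>(resolve => {
            this._resolve = () => resolve();
        });
    }

    // True while some caller holds the lock.
    get isTaken(): boolean {
        return !!this._promise;
    }

    release(): void {
        const resolve = this._resolve;
        this._promise = undefined;
        this._resolve = undefined;
        resolve?.();
    }
}

// Releases a whole group of locks in one call, which is what
// _takeBatchLock hands back to encrypt().
export class MultiLock implements ILock {
    constructor(private readonly locks: Lock[]) {}

    release(): void {
        for (const lock of this.locks) {
            lock.release();
        }
    }
}

With MAX_BATCH_SIZE locks shared by all concurrent encrypt() calls, roughly one lock per device in a batch, only about 20 devices' worth of olm sessions can be held in wasm memory at once; once every lock is taken, _takeBatchLock falls back to waiting on already-taken locks, so further batches only proceed after an earlier batch has released.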