/*
Copyright 2020 Bruno Windels <bruno@windels.cloud>

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

import {SortedArray} from "../../../observable/list/SortedArray";
import {ConnectionError} from "../../error.js";
import {PendingEvent, SendStatus} from "./PendingEvent.js";
import {makeTxnId, isTxnId} from "../../common.js";
import {REDACTION_TYPE} from "../common.js";
import {getRelationFromContent, getRelationTarget, setRelationTarget, REACTION_TYPE, ANNOTATION_RELATION_TYPE} from "../timeline/relations.js";

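/**
 * Queue of events that still need to be sent to the homeserver for a single room.
 *
 * Pending events are persisted in the pendingEvents store so they survive restarts,
 * and are sent in queueIndex order by a single send loop per room. A minimal usage
 * sketch, based on the tests at the bottom of this file:
 *
 *     const queue = new SendQueue({roomId, storage, hsApi});
 *     await queue.enqueueEvent("m.room.message", {body: "hello!"}, null, log);
 *     // later, when sync delivers the remote echo:
 *     // queue.emitRemovals(await queue.removeRemoteEchos(events, txn, log));
 */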
export class SendQueue {
    constructor({roomId, storage, hsApi, pendingEvents}) {
        pendingEvents = pendingEvents || [];
        this._roomId = roomId;
        this._storage = storage;
        this._hsApi = hsApi;
        this._pendingEvents = new SortedArray((a, b) => a.queueIndex - b.queueIndex);
        this._pendingEvents.setManyUnsorted(pendingEvents.map(data => this._createPendingEvent(data)));
        this._isSending = false;
        this._offline = false;
        this._roomEncryption = null;
        this._currentQueueIndex = 0;
    }

    _createPendingEvent(data, attachments = null) {
        const pendingEvent = new PendingEvent({
            data,
            remove: () => this._removeEvent(pendingEvent),
            emitUpdate: params => this._pendingEvents.update(pendingEvent, params),
            attachments
        });
        return pendingEvent;
    }

    enableEncryption(roomEncryption) {
        this._roomEncryption = roomEncryption;
    }

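    // Drains the queue in order: runs one detached log operation that sends every
    // pending event, marking the queue offline on ConnectionError and dropping
    // events that fail with a permanent homeserver error (400/403/404).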
    _sendLoop(log) {
        this._isSending = true;
        this._sendLoopLogItem = log.runDetached("send queue flush", async log => {
            try {
                for (const pendingEvent of this._pendingEvents) {
                    await log.wrap("send event", async log => {
                        log.set("queueIndex", pendingEvent.queueIndex);
                        try {
                            this._currentQueueIndex = pendingEvent.queueIndex;
                            await this._sendEvent(pendingEvent, log);
                        } catch(err) {
                            if (err instanceof ConnectionError) {
                                this._offline = true;
                                log.set("offline", true);
                                pendingEvent.setWaiting();
                            } else {
                                log.catch(err);
                                const isPermanentError = err.name === "HomeServerError" && (
                                    err.statusCode === 400 || // bad request, must be a bug on our end
                                    err.statusCode === 403 || // forbidden
                                    err.statusCode === 404    // not found
                                );
                                if (isPermanentError) {
                                    log.set("remove", true);
                                    await pendingEvent.abort();
                                } else {
                                    pendingEvent.setError(err);
                                }
                            }
                        } finally {
                            this._currentQueueIndex = 0;
                        }
                    });
                }
            } finally {
                this._isSending = false;
                this._sendLoopLogItem = null;
            }
        });
    }

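    // Takes one pending event through its remaining stages: upload attachments,
    // encrypt if the room is encrypted, then send it and persist the new remote id
    // (together with any related events still referring to it by txnId) in one transaction.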
    async _sendEvent(pendingEvent, log) {
        if (pendingEvent.needsUpload) {
            await log.wrap("upload attachments", log => pendingEvent.uploadAttachments(this._hsApi, log));
            await this._tryUpdateEvent(pendingEvent);
        }
        if (pendingEvent.needsEncryption) {
            pendingEvent.setEncrypting();
            const encryptionContent = pendingEvent.contentForEncryption;
            const {type, content} = await log.wrap("encrypt", log => this._roomEncryption.encrypt(
                pendingEvent.eventType, encryptionContent, this._hsApi, log));
            pendingEvent.setEncrypted(type, content);
            await this._tryUpdateEvent(pendingEvent);
        }
        if (pendingEvent.needsSending) {
            await pendingEvent.send(this._hsApi, log);
            // we now have a remoteId, but this pending event may be removed at any point in the future
            // (or past, so can't assume it still exists) once the remote echo comes in.
            // So if we have any related events that need to resolve the relatedTxnId to a related event id,
            // they need to do so now.
            // We ensure this by writing the new remote id for the pending event and all related events
            // with unresolved relatedTxnId in the queue in one transaction.
            const txn = await this._storage.readWriteTxn([this._storage.storeNames.pendingEvents]);
            try {
                await this._tryUpdateEventWithTxn(pendingEvent, txn);
                await this._resolveRemoteIdInPendingRelations(
                    pendingEvent.txnId, pendingEvent.remoteId, txn);
            } catch (err) {
                txn.abort();
                throw err;
            }
            await txn.complete();
        }
    }

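    // Rewrites relatedTxnId references to the now-known remote event id on all pending
    // events relating to the event with the given txnId, persisting them within the
    // given transaction, and returns the updated pending events.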
    async _resolveRemoteIdInPendingRelations(txnId, remoteId, txn) {
        const relatedEventWithoutRemoteId = this._pendingEvents.array.filter(pe => {
            return pe.relatedTxnId === txnId && pe.relatedEventId !== remoteId;
        });
        for (const relatedPE of relatedEventWithoutRemoteId) {
            relatedPE.setRelatedEventId(remoteId);
            await this._tryUpdateEventWithTxn(relatedPE, txn);
        }
        return relatedEventWithoutRemoteId;
    }

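    // Called with incoming timeline events: removes pending events whose remote echo
    // has arrived (matched by transaction_id or event_id) from storage within the given
    // transaction, and returns them so the caller can emit the removals afterwards.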
    async removeRemoteEchos(events, txn, parentLog) {
        const removed = [];
        for (const event of events) {
            const txnId = event.unsigned && event.unsigned.transaction_id;
            let idx;
            if (txnId) {
                idx = this._pendingEvents.array.findIndex(pe => pe.txnId === txnId);
            } else {
                idx = this._pendingEvents.array.findIndex(pe => pe.remoteId === event.event_id);
            }
            if (idx !== -1) {
                const pendingEvent = this._pendingEvents.get(idx);
                const remoteId = event.event_id;
                parentLog.log({l: "removeRemoteEcho", queueIndex: pendingEvent.queueIndex, remoteId, txnId});
                txn.pendingEvents.remove(pendingEvent.roomId, pendingEvent.queueIndex);
                removed.push(pendingEvent);
                await this._resolveRemoteIdInPendingRelations(txnId, remoteId, txn);
            }
        }
        return removed;
    }

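    // Removes a single pending event from storage and from the in-memory list,
    // re-checking its index after the async transaction to avoid racing with a remote echo.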
    async _removeEvent(pendingEvent) {
        let hasEvent = this._pendingEvents.array.indexOf(pendingEvent) !== -1;
        if (hasEvent) {
            const txn = await this._storage.readWriteTxn([this._storage.storeNames.pendingEvents]);
            try {
                txn.pendingEvents.remove(pendingEvent.roomId, pendingEvent.queueIndex);
            } catch (err) {
                txn.abort();
            }
            await txn.complete();
            // lookup index after async txn is complete,
            // to make sure we're not racing with anything
            const idx = this._pendingEvents.array.indexOf(pendingEvent);
            if (idx !== -1) {
                this._pendingEvents.remove(idx);
            }
        }
        pendingEvent.dispose();
    }

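    // Emits the removal of pending events that were already deleted from storage
    // by removeRemoteEchos, and disposes them.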
    emitRemovals(pendingEvents) {
        for (const pendingEvent of pendingEvents) {
            const idx = this._pendingEvents.array.indexOf(pendingEvent);
            if (idx !== -1) {
                this._pendingEvents.remove(idx);
            }
            pendingEvent.dispose();
        }
    }

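    // Clears the offline flag and restarts the send loop if there are still
    // pending events and no loop is currently running.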
    resumeSending(parentLog) {
        this._offline = false;
        if (this._pendingEvents.length) {
            parentLog.wrap("resumeSending", log => {
                log.set("id", this._roomId);
                log.set("pendingEvents", this._pendingEvents.length);
                if (!this._isSending) {
                    this._sendLoop(log);
                }
                if (this._sendLoopLogItem) {
                    log.refDetached(this._sendLoopLogItem);
                }
            });
        }
    }

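    // Public entry point for sending an event. Resolves a relation target that is
    // still a transaction id, drops duplicate annotations (reactions with the same
    // key on the same target), and then enqueues the event.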
    async enqueueEvent(eventType, content, attachments, log) {
        const relation = getRelationFromContent(content);
        let relatedTxnId = null;
        if (relation) {
            const relationTarget = getRelationTarget(relation);
            if (isTxnId(relationTarget)) {
                relatedTxnId = relationTarget;
                setRelationTarget(relation, null);
            }
            if (relation.rel_type === ANNOTATION_RELATION_TYPE) {
                // Here we know the shape of the relation, and can use event_id safely
                const isAlreadyAnnotating = this._pendingEvents.array.some(pe => {
                    const r = getRelationFromContent(pe.content);
                    return pe.eventType === eventType && r && r.key === relation.key &&
                        (pe.relatedTxnId === relatedTxnId || r.event_id === relation.event_id);
                });
                if (isAlreadyAnnotating) {
                    log.set("already_annotating", true);
                    return;
                }
            }
        }
        await this._enqueueEvent(eventType, content, attachments, relatedTxnId, null, log);
    }

    async _enqueueEvent(eventType, content, attachments, relatedTxnId, relatedEventId, log) {
        const pendingEvent = await this._createAndStoreEvent(eventType, content, relatedTxnId, relatedEventId, attachments);
        this._pendingEvents.set(pendingEvent);
        log.set("queueIndex", pendingEvent.queueIndex);
        log.set("pendingEvents", this._pendingEvents.length);
        if (!this._isSending && !this._offline) {
            this._sendLoop(log);
        }
        if (this._sendLoopLogItem) {
            log.refDetached(this._sendLoopLogItem);
        }
    }

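    // Enqueues a redaction for either a remote event id or the txnId of a pending event.
    // If the target is still pending and hasn't started sending, it is simply aborted
    // instead of sending a redaction; duplicate redactions of the same target are dropped.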
    async enqueueRedaction(eventIdOrTxnId, reason, log) {
        const isAlreadyRedacting = this._pendingEvents.array.some(pe => {
            return pe.eventType === REDACTION_TYPE &&
                (pe.relatedTxnId === eventIdOrTxnId || pe.relatedEventId === eventIdOrTxnId);
        });
        if (isAlreadyRedacting) {
            log.set("already_redacting", true);
            return;
        }
        let relatedTxnId;
        let relatedEventId;
        if (isTxnId(eventIdOrTxnId)) {
            relatedTxnId = eventIdOrTxnId;
            const txnId = eventIdOrTxnId;
            const pe = this._pendingEvents.array.find(pe => pe.txnId === txnId);
            if (pe && !pe.remoteId && pe.status !== SendStatus.Sending) {
                // haven't started sending this event yet,
                // just remove it from the queue
                log.set("remove", relatedTxnId);
                await pe.abort();
                return;
            } else if (pe) {
                relatedEventId = pe.remoteId;
            } else {
                // we don't have the pending event anymore,
                // the remote echo must have arrived in the meantime.
                // we could look for it in the timeline, but for now
                // we don't do anything as this race is quite unlikely
                // and a bit complicated to fix.
                return;
            }
        } else {
            relatedEventId = eventIdOrTxnId;
            const pe = this._pendingEvents.array.find(pe => pe.remoteId === relatedEventId);
            if (pe) {
                // also set the txn id just in case that an event id was passed
                // for relating to a pending event that is still waiting for the remote echo
                relatedTxnId = pe.txnId;
            }
        }
        log.set("relatedTxnId", relatedTxnId);
        log.set("relatedEventId", relatedEventId);
        await this._enqueueEvent(REDACTION_TYPE, {reason}, null, relatedTxnId, relatedEventId, log);
    }

    get pendingEvents() {
        return this._pendingEvents;
    }

    async _tryUpdateEvent(pendingEvent) {
        const txn = await this._storage.readWriteTxn([this._storage.storeNames.pendingEvents]);
        try {
            await this._tryUpdateEventWithTxn(pendingEvent, txn);
        } catch (err) {
            txn.abort();
            throw err;
        }
        await txn.complete();
    }

    async _tryUpdateEventWithTxn(pendingEvent, txn) {
        // pendingEvent might have been removed already here
        // by a racing remote echo, so check first so we don't recreate it
        if (await txn.pendingEvents.exists(pendingEvent.roomId, pendingEvent.queueIndex)) {
            txn.pendingEvents.update(pendingEvent.data);
        }
    }

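    // Allocates the next queueIndex (never lower than the index of the event currently
    // being sent, even if its storage entry was already removed by the remote echo),
    // builds the PendingEvent and persists it, all within one pendingEvents transaction.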
    async _createAndStoreEvent(eventType, content, relatedTxnId, relatedEventId, attachments) {
        const txn = await this._storage.readWriteTxn([this._storage.storeNames.pendingEvents]);
        let pendingEvent;
        try {
            const pendingEventsStore = txn.pendingEvents;
            const maxStorageQueueIndex = await pendingEventsStore.getMaxQueueIndex(this._roomId) || 0;
            // don't use the queueIndex of the pendingEvent currently waiting for /send to return
            // if the remote echo already removed the pendingEvent in storage, as the send loop
            // wouldn't be able to detect the remote echo already arrived and end up overwriting the new event
            const maxQueueIndex = Math.max(maxStorageQueueIndex, this._currentQueueIndex);
            const queueIndex = maxQueueIndex + 1;
            const needsEncryption = eventType !== REDACTION_TYPE &&
                eventType !== REACTION_TYPE &&
                !!this._roomEncryption;
            pendingEvent = this._createPendingEvent({
                roomId: this._roomId,
                queueIndex,
                eventType,
                content,
                relatedTxnId,
                relatedEventId,
                txnId: makeTxnId(),
                needsEncryption,
                needsUpload: !!attachments
            }, attachments);
            pendingEventsStore.add(pendingEvent.data);
        } catch (err) {
            txn.abort();
            throw err;
        }
        await txn.complete();
        return pendingEvent;
    }

    dispose() {
        for (const pe of this._pendingEvents) {
            pe.dispose();
        }
    }
}

import {HomeServer as MockHomeServer} from "../../../mocks/HomeServer.js";
import {createMockStorage} from "../../../mocks/Storage";
import {ListObserver} from "../../../mocks/ListObserver.js";
import {NullLogger, NullLogItem} from "../../../logging/NullLogger";
import {createEvent, withTextBody, withTxnId} from "../../../mocks/event.js";
import {poll} from "../../../mocks/poll.js";
import {createAnnotation} from "../timeline/relations.js";

export function tests() {
    const logger = new NullLogger();
    return {
        "enqueue second message when remote echo of first arrives before /send returns": async assert => {
            const storage = await createMockStorage();
            const hs = new MockHomeServer();
            // 1. enqueue and start send event 1
            const queue = new SendQueue({roomId: "!abc", storage, hsApi: hs.api});
            const event1 = withTextBody("message 1", createEvent("m.room.message", "$123"));
            await logger.run("event1", log => queue.enqueueEvent(event1.type, event1.content, null, log));
            assert.equal(queue.pendingEvents.length, 1);
            const sendRequest1 = hs.requests.send[0];
            // 2. receive remote echo, before /send has returned
            const remoteEcho = withTxnId(sendRequest1.arguments[2], event1);
            const txn = await storage.readWriteTxn([storage.storeNames.pendingEvents]);
            const removal = await logger.run("remote echo", log => queue.removeRemoteEchos([remoteEcho], txn, log));
            await txn.complete();
            assert.equal(removal.length, 1);
            queue.emitRemovals(removal);
            assert.equal(queue.pendingEvents.length, 0);
            // 3. now enqueue event 2
            const event2 = withTextBody("message 2", createEvent("m.room.message", "$456"));
            await logger.run("event2", log => queue.enqueueEvent(event2.type, event2.content, null, log));
            // even though the first pending event has been removed by the remote echo,
            // the second should get the next index, as the send loop is still blocking on the first one
            assert.equal(Array.from(queue.pendingEvents)[0].queueIndex, 2);
            // 4. send for event 1 comes back
            sendRequest1.respond({event_id: event1.event_id});
            // 5. now expect second send request for event 2
            const sendRequest2 = await poll(() => hs.requests.send[1]);
            sendRequest2.respond({event_id: event2.event_id});
            await poll(() => !queue._isSending);
        },
        "redaction of pending event that hasn't started sending yet aborts it": async assert => {
            const queue = new SendQueue({
                roomId: "!abc",
                storage: await createMockStorage(),
                hsApi: new MockHomeServer().api
            });
            // first, enqueue a message that will be attempted to send, but we don't respond
            await queue.enqueueEvent("m.room.message", {body: "hello!"}, null, new NullLogItem());

            const observer = new ListObserver();
            queue.pendingEvents.subscribe(observer);
            await queue.enqueueEvent("m.room.message", {body: "...world"}, null, new NullLogItem());
            let txnId;
            {
                const {type, index, value} = await observer.next();
                assert.equal(type, "add");
                assert.equal(index, 1);
                assert.equal(typeof value.txnId, "string");
                txnId = value.txnId;
            }
            await queue.enqueueRedaction(txnId, null, new NullLogItem());
            {
                const {type, value, index} = await observer.next();
                assert.equal(type, "remove");
                assert.equal(index, 1);
                assert.equal(txnId, value.txnId);
            }
        },
        "duplicate redaction gets dropped": async assert => {
            const queue = new SendQueue({
                roomId: "!abc",
                storage: await createMockStorage(),
                hsApi: new MockHomeServer().api
            });
            assert.equal(queue.pendingEvents.length, 0);
            await queue.enqueueRedaction("!event", null, new NullLogItem());
            assert.equal(queue.pendingEvents.length, 1);
            await queue.enqueueRedaction("!event", null, new NullLogItem());
            assert.equal(queue.pendingEvents.length, 1);
        },
        "duplicate reaction gets dropped": async assert => {
            const queue = new SendQueue({
                roomId: "!abc",
                storage: await createMockStorage(),
                hsApi: new MockHomeServer().api
            });
            assert.equal(queue.pendingEvents.length, 0);
            await queue.enqueueEvent("m.reaction", createAnnotation("!target", "🚀"), null, new NullLogItem());
            assert.equal(queue.pendingEvents.length, 1);
            await queue.enqueueEvent("m.reaction", createAnnotation("!target", "👋"), null, new NullLogItem());
            assert.equal(queue.pendingEvents.length, 2);
            await queue.enqueueEvent("m.reaction", createAnnotation("!target", "🚀"), null, new NullLogItem());
            assert.equal(queue.pendingEvents.length, 2);
        },
    }
}