hydrogen-web/src/logging/IDBLogger.js

/*
Copyright 2020 Bruno Windels <bruno@windels.cloud>

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import {
    openDatabase,
    txnAsPromise,
    reqAsPromise,
    iterateCursor,
    fetchResults,
    encodeUint64
} from "../matrix/storage/idb/utils.js";
import {BaseLogger} from "./BaseLogger.js";
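
// Illustrative construction sketch (not part of this module); apart from `name`,
// `flushInterval` and `limit`, the option names below are assumptions: `platform`
// is expected to be handed to BaseLogger and to expose clock.createInterval,
// encoding.utf8 and createBlob, which this file uses via this._platform.
//
//     const logger = new IDBLogger({
//         name: "hydrogen_logs",   // hypothetical database name / localStorage key prefix
//         platform,                // assumed to come in via options
//         flushInterval: 5 * 1000, // ms between IndexedDB flushes
//         limit: 3000,             // max number of items kept in the "logs" store
//     });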
export class IDBLogger extends BaseLogger {
    constructor(options) {
        super(options);
        const {name, flushInterval = 5 * 1000, limit = 3000} = options;
        this._name = name;
        this._limit = limit;
        // does not get loaded from idb on startup as we only use it to
        // differentiate between two items with the same start time
        this._itemCounter = 0;
        this._queuedItems = this._loadQueuedItems();
        // TODO: add dirty flag when calling descend
        // TODO: also listen for unload just in case sync keeps on running after pagehide is fired?
        window.addEventListener("pagehide", this, false);
        this._flushInterval = this._platform.clock.createInterval(() => this._tryFlush(), flushInterval);
    }

    dispose() {
        window.removeEventListener("pagehide", this, false);
        this._flushInterval.dispose();
    }

    handleEvent(evt) {
        if (evt.type === "pagehide") {
            this._finishAllAndFlush();
        }
    }

    async _tryFlush() {
        const db = await this._openDB();
        try {
            const txn = db.transaction(["logs"], "readwrite");
            const logs = txn.objectStore("logs");
            const amount = this._queuedItems.length;
            for (const i of this._queuedItems) {
                logs.add(i);
            }
            // TODO: delete more than needed so we don't delete on every flush?
            // trim logs if needed
            const itemCount = await reqAsPromise(logs.count());
            if (itemCount > this._limit) {
                let currentCount = itemCount;
                await iterateCursor(logs.openCursor(), (_, __, cursor) => {
                    cursor.delete();
                    currentCount -= 1;
                    return {done: currentCount <= this._limit};
                });
            }
            await txnAsPromise(txn);
            this._queuedItems.splice(0, amount);
        } finally {
            try {
                db.close();
            } catch (e) {}
        }
    }
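
    // Note on _tryFlush above: the queued items are only spliced out of memory after
    // txnAsPromise resolves, so nothing is lost if the transaction fails. Trimming walks
    // the cursor in ascending id order, and ids begin with the encoded start timestamp
    // (see _persistItem), so the oldest items are deleted first; e.g. with limit = 3000
    // and 3042 stored items, the 42 oldest are removed.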

    _finishAllAndFlush() {
        this._finishOpenItems();
        this._persistQueuedItems(this._queuedItems);
    }

    _loadQueuedItems() {
        const key = `${this._name}_queuedItems`;
        try {
            const json = window.localStorage.getItem(key);
            if (json) {
                window.localStorage.removeItem(key);
                return JSON.parse(json);
            }
        } catch (e) {}
        return [];
    }
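
    // Lifecycle note: on "pagehide" there is no time for an asynchronous IndexedDB write,
    // so _finishAllAndFlush closes any still-open items (via _finishOpenItems, presumably
    // inherited from BaseLogger) and stashes the queue synchronously in localStorage.
    // _loadQueuedItems picks that stash up again (and clears it) on the next startup,
    // after which the regular flush interval moves the items into IndexedDB.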

    _openDB() {
        return openDatabase(this._name, db => db.createObjectStore("logs", {keyPath: "id"}), 1);
    }

    _persistItem(serializedItem) {
        this._itemCounter += 1;
        this._queuedItems.push({
            id: `${encodeUint64(serializedItem.s)}:${this._itemCounter}`,
            tree: serializedItem
        });
    }
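
    // The id above sorts chronologically as a string: encodeUint64 is assumed to produce
    // a fixed-width encoding of the start timestamp (serializedItem.s), and the counter
    // suffix only disambiguates items sharing the same start time. A queued entry thus
    // looks roughly like {id: "<encoded start>:42", tree: {...}}.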

    _persistQueuedItems(items) {
        try {
            window.localStorage.setItem(`${this._name}_queuedItems`, JSON.stringify(items));
        } catch (e) {
            console.warn("Could not persist queued log items in localStorage, they will likely be lost", e);
        }
    }

    async export() {
        const db = await this._openDB();
        try {
            const txn = db.transaction(["logs"], "readonly");
            const logs = txn.objectStore("logs");
            const storedItems = await fetchResults(logs.openCursor(), () => false);
            const allItems = storedItems.concat(this._queuedItems);
            // a sort comparator needs to return a number rather than a boolean
            const sortedItems = allItems.sort((a, b) => {
                return a.id > b.id ? 1 : -1;
            });
            return new IDBLogExport(sortedItems, this, this._platform);
        } finally {
            try {
                db.close();
            } catch (e) {}
        }
    }

    async _removeItems(items) {
        const db = await this._openDB();
        try {
            const txn = db.transaction(["logs"], "readwrite");
            const logs = txn.objectStore("logs");
            for (const item of items) {
                const queuedIdx = this._queuedItems.findIndex(i => i.id === item.id);
                if (queuedIdx === -1) {
                    logs.delete(item.id);
                } else {
                    this._queuedItems.splice(queuedIdx, 1);
                }
            }
            await txnAsPromise(txn);
        } finally {
            try {
                db.close();
            } catch (e) {}
        }
    }
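
    // _removeItems checks the in-memory queue as well because an export can contain items
    // that were still queued and never written to IndexedDB; those are spliced out of the
    // queue instead of being deleted from the "logs" store.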
}
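
// Illustrative export flow (variable names are hypothetical):
//
//     const logExport = await logger.export();  // snapshot of stored + still-queued items
//     const blob = logExport.asBlob();          // {"version": 1, "items": [...]} as JSON
//     // ...hand the blob to the user or an upload, then optionally:
//     await logExport.removeFromStore();        // delete the exported items again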
class IDBLogExport {
    constructor(items, logger, platform) {
        this._items = items;
        this._logger = logger;
        this._platform = platform;
    }

    get minTimestamp() {
        if (!this._items.length) {
            return 0;
        }
        return this._items[0].start;
    }

    get maxTimestamp() {
        if (!this._items.length) {
            return 0;
        }
        return this._items[this._items.length - 1].end;
    }

    get count() {
        return this._items.length;
    }

    /**
     * @return {Promise}
     */
    removeFromStore() {
        return this._logger._removeItems(this._items);
    }

    asBlob() {
        const log = {
            version: 1,
            items: this._items
        };
        const json = JSON.stringify(log);
        const buffer = this._platform.encoding.utf8.encode(json);
        const blob = this._platform.createBlob(buffer, "application/json");
        return blob;
    }
}