finish logging api

Bruno Windels 2021-02-12 13:04:05 +01:00
parent 89599e9f87
commit 91bd5f638c
5 changed files with 226 additions and 166 deletions

src/logging/BaseLogger.js (new file)

@@ -0,0 +1,60 @@
/*
Copyright 2020 Bruno Windels <bruno@windels.cloud>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import {LogItem} from "./LogItem.js";

export class BaseLogger {
    constructor(platform) {
        this._openItems = new Set();
        this._platform = platform;
    }

    wrapLog(labelOrValues, callback, logLevel) {
        const item = new LogItem(labelOrValues, logLevel, this._platform);
        // track the item until it has been persisted
        this._openItems.add(item);
        const finishItem = () => {
            this._persistItem(item);
            // Set uses delete(), not remove()
            this._openItems.delete(item);
        };
        try {
            const result = item.run(callback);
            if (result instanceof Promise) {
                return result.then(promiseResult => {
                    finishItem();
                    return promiseResult;
                }, err => {
                    finishItem();
                    throw err;
                });
            } else {
                finishItem();
                return result;
            }
        } catch (err) {
            finishItem();
            throw err;
        }
    }

    _persistItem(item) {
        throw new Error("not implemented");
    }

    async export() {
        throw new Error("not implemented");
    }
}
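
For reference, a minimal usage sketch of the API above, assuming a concrete logger subclass instance `logger` (e.g. the IDB-backed one below) and a hypothetical `sendMessage` helper; the callback's result or error is passed through unchanged, and the item is persisted once the callback settles:

    const receipt = await logger.wrapLog("send message", log => {
        log.set("roomId", roomId);          // roomId assumed to be in scope
        return sendMessage(roomId, body);   // may return a Promise; wrapLog handles both cases
    });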


@@ -14,68 +14,27 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */

-import {openDatabase, txnAsPromise, reqAsPromise, iterateCursor, fetchResults} from "../matrix/storage/idb/utils.js";
-import {LogItem} from "./LogItem.js";
+import {
+    openDatabase,
+    txnAsPromise,
+    reqAsPromise,
+    iterateCursor,
+    fetchResults,
+    encodeUint64
+} from "../matrix/storage/idb/utils.js";
+import {BaseLogger} from "./BaseLogger.js";

-class Logger {
-    constructor(clock) {
-        this._openItems = new Set();
-        this._clock = clock;
-    }
-
-    decend(label, callback, logLevel) {
-        const item = new LogItem(label, this, logLevel, this._clock);
-        const failItem = (err) => {
-            item.catch(err);
-            finishItem();
-            throw err;
-        };
-        const finishItem = () => {
-            item.finish();
-            this._persistItem(item);
-            this._openItems.remove(item);
-        };
-        let result;
-        try {
-            result = callback(item);
-            if (result instanceof Promise) {
-                result = result.then(promiseResult => {
-                    finishItem();
-                    return promiseResult;
-                }, failItem);
-            }
-        } catch (err) {
-            failItem(err);
-        }
-    }
-
-    _persistItem(item) {
-        throw new Error("not implemented");
-    }
-
-    async extractItems() {
-        throw new Error("not implemented");
-    }
-}
-
-export function encodeUint64(n) {
-    const hex = n.toString(16);
-    return "0".repeat(16 - hex.length) + hex;
-}
-
-export default class IDBLogger extends Logger {
-    constructor({name, clock, utf8, flushInterval = 2 * 60 * 1000, limit = 1000}) {
-        super(clock);
-        this._utf8 = utf8;
+export class IDBLogger extends BaseLogger {
+    constructor({name, platform, flushInterval = 2 * 60 * 1000, limit = 3000}) {
+        super(platform);
         this._name = name;
         this._limit = limit;
         // does not get loaded from idb on startup as we only use it to
         // differentiate between two items with the same start time
         this._itemCounter = 0;
         this._queuedItems = this._loadQueuedItems();
+        // TODO: add dirty flag when calling descend
+        // TODO: also listen for unload just in case sync keeps on running after pagehide is fired?
         window.addEventListener("pagehide", this, false);
         this._flushInterval = this._clock.createInterval(() => this._tryFlush(), flushInterval);
     }
@@ -100,6 +59,7 @@ export default class IDBLogger extends Logger {
             for(const i of this._queuedItems) {
                 logs.add(i);
             }
+            // TODO: delete more than needed so we don't delete on every flush?
             // trim logs if needed
             const itemCount = await reqAsPromise(logs.count());
             if (itemCount > this._limit) {
@@ -130,11 +90,13 @@ export default class IDBLogger extends Logger {
     _loadQueuedItems() {
         const key = `${this._name}_queuedItems`;
+        try {
             const json = window.localStorage.getItem(key);
             if (json) {
                 window.localStorage.removeItem(key);
                 return JSON.parse(json);
             }
+        } catch (e) {}
         return [];
     }
@@ -146,39 +108,49 @@ export default class IDBLogger extends Logger {
         this._itemCounter += 1;
         this._queuedItems.push({
             id: `${encodeUint64(item.start)}:${this._itemCounter}`,
-            // store as buffer so parsing overhead is lower
-            content: this._utf8.encode(JSON.stringify(item.serialize()))
+            tree: item.serialize()
         });
     }

     _persistQueuedItems(items) {
+        try {
             window.localStorage.setItem(`${this._name}_queuedItems`, JSON.stringify(items));
+        } catch (e) {
+            console.warn("Could not persist queued log items in localStorage, they will likely be lost", e);
+        }
     }

-    // should we actually delete items and just not rely on trimming for it not to grow too large?
-    // just worried that people will create a file, not do anything with it, hit export logs again and then
-    // send a mostly empty file. we could just not delete but in _tryFlush (and perhaps make trimming delete a few more than needed to be below limit)
-    // how does eleweb handle this?
-    //
-    // both deletes and reads items from store
-    async extractItems() {
+    async export() {
         const db = this._openDB();
         try {
-            const queuedItems = this._queuedItems.slice();
-            const txn = this.db.transaction(["logs"], "readwrite");
+            const txn = this.db.transaction(["logs"], "readonly");
             const logs = txn.objectStore("logs");
             const items = await fetchResults(logs.openCursor(), () => false);
-            // we know we have read all the items as we're doing this in a txn
-            logs.clear();
-            await txnAsPromise(txn);
-            // once the transaction is complete, remove the queued items
-            this._queuedItems.splice(0, queuedItems.length);
-            const sortedItems = items.concat(queuedItems).sort((a, b) => {
+            const sortedItems = items.concat(this._queuedItems).sort((a, b) => {
                 return a.id > b.id;
-            }).map(i => {
             });
-            return sortedItems;
+            return new IDBLogExport(sortedItems, this, this._platform);
+        } finally {
+            try {
+                db.close();
+            } catch (e) {}
+        }
+    }
+
+    async _removeItems(items) {
+        const db = this._openDB();
+        try {
+            const txn = this.db.transaction(["logs"], "readwrite");
+            const logs = txn.objectStore("logs");
+            for (const item of items) {
+                const queuedIdx = this._queuedItems.findIndex(i => i.id === item.id);
+                if (queuedIdx === -1) {
+                    logs.delete(item.id);
+                } else {
+                    this._queuedItems.splice(queuedIdx, 1);
+                }
+            }
+            await txnAsPromise(txn);
         } finally {
             try {
                 db.close();
@@ -186,3 +158,29 @@ export default class IDBLogger extends Logger {
             }
         }
     }
 }
+
+class IDBLogExport {
+    constructor(items, logger, platform) {
+        this._items = items;
+        this._logger = logger;
+        this._platform = platform;
+    }
+
+    /**
+     * @return {Promise}
+     */
+    removeFromStore() {
+        return this._logger._removeItems(this._items);
+    }
+
+    asBlob() {
+        const log = {
+            version: 1,
+            items: this._items
+        };
+        const json = JSON.stringify(log);
+        const buffer = this._platform.utf8.encode(json);
+        const blob = this._platform.createBlob(buffer, "application/json");
+        return blob;
+    }
+}
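
A rough sketch of how the export flow added above could be consumed, assuming a logger instance and a hypothetical `offerSaveAs` download helper on the platform side:

    const logExport = await logger.export();        // reads idb + queued items, sorted by id
    offerSaveAs(logExport.asBlob(), "logs.json");   // hypothetical helper handing the blob to the user
    await logExport.removeFromStore();              // then drop the exported items from idb / the queue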


@@ -15,25 +15,33 @@ limitations under the License.
 */

 export class LogItem {
-    constructor(label, parent, logLevel, clock) {
-        this._clock = clock;
-        this._start = clock.now();
+    constructor(labelOrValues, logLevel, platform) {
+        this._platform = platform;
+        this._start = platform.clock.now();
         this._end = null;
-        this._values = {label};
-        this._parent = parent;
+        this._values = typeof labelOrValues === "string" ? {label: labelOrValues} : labelOrValues;
         this._error = null;
         this._children = [];
         this._logLevel = logLevel;
     }

-    // should this handle errors in the same way as logger.descend/start?
-    descend(label, logLevel = this._logLevel) {
-        if (this._end !== null) {
-            throw new Error("can't descend on finished item");
-        }
-        const item = new LogItem(label, logLevel);
-        this._children.push(item);
-        return item;
+    /**
+     * Creates a new child item and runs it in `callback`.
+     */
+    wrap(labelOrValues, callback, logLevel = this._logLevel) {
+        const item = this._createChild(labelOrValues, logLevel);
+        return item.run(callback);
+    }
+
+    /**
+     * Creates a new child item that finishes immediately
+     * and can hence not be modified anymore.
+     *
+     * Hence, the child item is not returned.
+     */
+    log(labelOrValues, logLevel = this._logLevel) {
+        const item = this._createChild(labelOrValues, logLevel);
+        item.end = item.start;
     }

     set(key, value) {
@@ -45,22 +53,9 @@ export class LogItem {
         }
     }

-    // XXX: where will this be called? from wrapAsync?
-    finish() {
-        if (this._end === null) {
-            for(const c of this._children) {
-                c.finish();
-            }
-            this._end = this._clock.now();
-            if (this._parent) {
-                this._parent._closeChild(this);
-            }
-        }
-    }
-
-    catch(err) {
-        this._error = err;
-        console.error(`log item ${this.values.label} failed: ${err.message}:\n${err.stack}`);
+    anonymize(value) {
+        const buffer = this._platform.crypto.digest("SHA-256", value);
+        return this._platform.base64.encode(buffer);
     }

     serialize() {
@@ -81,14 +76,66 @@ export class LogItem {
         };
     }

-    async wrapAsync(fn) {
+    /**
+     * You probably want to use `wrap` instead of this.
+     *
+     * Runs a callback passing this log item,
+     * recording the timing and any error.
+     *
+     * callback can return a Promise.
+     *
+     * Can only be called once, and will throw after.
+     *
+     * @param  {Function} callback [description]
+     * @return {[type]}            [description]
+     */
+    run(callback) {
+        if (this._end !== null) {
+            throw new Error("item is finished");
+        }
+        let result;
         try {
-            const ret = await fn(this);
-            this.finish();
-            return ret;
+            result = callback(this);
+            if (result instanceof Promise) {
+                return result.then(promiseResult => {
+                    this._finish();
+                    return promiseResult;
+                }, err => {
+                    this._catch(err);
+                    this._finish();
+                    throw err;
+                });
+            } else {
+                this._finish();
+                return result;
+            }
         } catch (err) {
-            this.fail(err);
+            this._catch(err);
+            this._finish();
             throw err;
         }
     }
+
+    _catch(err) {
+        this._error = err;
+        console.error(`log item ${this.values.label} failed: ${err.message}:\n${err.stack}`);
+    }
+
+    _createChild(labelOrValues, logLevel) {
+        if (this._end !== null) {
+            throw new Error("item is finished");
+        }
+        const item = new LogItem(labelOrValues, logLevel, this._platform);
+        this._children.push(item);
+        return item;
+    }
+
+    _finish() {
+        if (this._end === null) {
+            for(const c of this._children) {
+                c._finish();
+            }
+            this._end = this._platform.clock.now();
+        }
+    }
 }
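
To illustrate how the pieces above fit together: `wrap` creates a child via `_createChild` and hands it to `run`, which times the callback and records any error, while `log` records a zero-duration child. A small sketch, assuming `item` is an unfinished LogItem and `fetchProfile` is a hypothetical async helper:

    await item.wrap("fetch profile", async log => {   // wrap -> _createChild -> run
        log.set("userId", userId);                    // userId assumed to be in scope
        log.log({l: "cache miss"});                   // immediately finished child
        return await fetchProfile(userId);            // a rejection is recorded via _catch
    });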


@@ -1,51 +0,0 @@
/*
Copyright 2020 Bruno Windels <bruno@windels.cloud>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import {LogItem} from "./LogItem.js";

export class Logger {
    constructor(persister) {
        this._openItems = [];
        this._persister = persister;
        this._closing = false;
    }

    restore() {
        const items = this._persister.loadTempItems();
        return this._persister.persistItems(items);
    }

    start(label, logLevel) {
        const item = new LogItem(label, logLevel, this);
        this._openItems.push(item);
        return item;
    }

    _closeChild(item) {
        if (!this._closing) {
            this._persister.persistItems([item.serialize()]);
        }
    }

    close() {
        this._closing = true;
        for(const i of this._openItems) {
            i.finish();
        }
        this._closing = false;
        this._persister.persistTempItems(this._openItems.map(i => i.serialize()));
    }
}


@@ -54,6 +54,12 @@ export function encodeUint32(n) {
     return "0".repeat(8 - hex.length) + hex;
 }

+// used for logs where timestamp is part of key, which is larger than 32 bit
+export function encodeUint64(n) {
+    const hex = n.toString(16);
+    return "0".repeat(16 - hex.length) + hex;
+}
+
 export function decodeUint32(str) {
     return parseInt(str, 16);
 }
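
For illustration: millisecond timestamps are around 2^40, so they do not fit the 32 bit variant, and the fixed-width zero padding keeps lexicographic ordering of the resulting keys identical to numeric ordering:

    encodeUint32(0xffffffff);                     // "ffffffff", the largest value it can represent
    const id = `${encodeUint64(Date.now())}:1`;   // 16 hex chars, the shape used for log item ids above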