Merge pull request #853 from vector-im/dbkr/better_rageshake

Change rageshake to save much more regularly
David Baker authored 2023-01-17 10:20:26 +00:00, committed by GitHub
commit 9437a00997
2 changed files with 89 additions and 88 deletions
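In outline, the change replaces the old fixed 30-second setInterval flush with an event-driven one: ConsoleLogger now emits an event for every log line, and IndexedDBLogStore listens for it and flushes through a lodash throttle (at most once every 2 seconds, on the trailing edge), plus a flush on beforeunload. A minimal, self-contained sketch of that pattern, with simplified names and a stubbed-out write step rather than the actual element-call code:

// Sketch only: the throttled, event-driven flush pattern this commit adopts.
// Class names and the persist step are illustrative stand-ins.
import EventEmitter from "events";
import { throttle } from "lodash";

const MAX_FLUSH_INTERVAL_MS = 2 * 1000;

class SketchLogger extends EventEmitter {
    private buffer = "";

    public log(line: string): void {
        this.buffer += line + "\n";
        this.emit("log"); // tell the store that new lines are waiting
    }

    public popLogs(): string {
        const lines = this.buffer;
        this.buffer = "";
        return lines;
    }
}

class SketchStore {
    // Flush at most once per MAX_FLUSH_INTERVAL_MS, on the trailing edge,
    // so a burst of logging becomes a single batched write.
    private throttledFlush = throttle(() => this.flush(), MAX_FLUSH_INTERVAL_MS, {
        leading: false,
        trailing: true,
    });

    public constructor(private logger: SketchLogger) {
        logger.on("log", this.throttledFlush);
        // Also flush when the page unloads so the last lines are not lost.
        window.addEventListener("beforeunload", () => this.flush());
    }

    private flush(): void {
        const lines = this.logger.popLogs();
        if (lines.length === 0) return;
        // ...append `lines` to IndexedDB here...
    }
}

Compared with the old 30-second timer, a quiet app writes nothing, while a busy one still persists every couple of seconds, which is what keeps the saved logs fresh when a rageshake is submitted.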

View file

@@ -45,6 +45,7 @@
 "i18next": "^21.10.0",
 "i18next-browser-languagedetector": "^6.1.8",
 "i18next-http-backend": "^1.4.4",
+"lodash": "^4.17.21",
 "matrix-js-sdk": "github:matrix-org/matrix-js-sdk#7b10fa367df357b51c2e78e220d39e5e7967f9e3",
 "matrix-widget-api": "^1.0.0",
 "mermaid": "^8.13.8",

View file

@@ -1,21 +1,4 @@
 /*
-Copyright 2022 New Vector Ltd
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-/* eslint-disable @typescript-eslint/ban-ts-comment */
-/*
 Copyright 2017 OpenMarket Ltd
 Copyright 2018 New Vector Ltd
 Copyright 2019 The New Vector Ltd
@@ -54,15 +37,23 @@ limitations under the License.
 // actually timestamps. We then purge the remaining logs. We also do this
 // purge on startup to prevent logs from accumulating.
+import EventEmitter from "events";
+import { throttle } from "lodash";
 import { logger } from "matrix-js-sdk/src/logger";
 import { randomString } from "matrix-js-sdk/src/randomstring";
-// the frequency with which we flush to indexeddb
-const FLUSH_RATE_MS = 30 * 1000;
 // the length of log data we keep in indexeddb (and include in the reports)
 const MAX_LOG_SIZE = 1024 * 1024 * 5; // 5 MB
+// Shortest amount of time between flushes. We are just appending to an
+// IndexedDB table so we don't expect flushing to be that expensive, but
+// we can batch the writes a little.
+const MAX_FLUSH_INTERVAL_MS = 2 * 1000;
+enum ConsoleLoggerEvent {
+Log = "log",
+}
 type LogFunction = (
 ...args: (Error | DOMException | object | string)[]
 ) => void;
@@ -76,7 +67,7 @@ interface LogEntry {
 index?: number;
 }
-export class ConsoleLogger {
+export class ConsoleLogger extends EventEmitter {
 private logs = "";
 private originalFunctions: { [key in LogFunctionName]?: LogFunction } = {};
@@ -99,13 +90,6 @@ export class ConsoleLogger {
 });
 }
-public bypassRageshake(
-fnName: LogFunctionName,
-...args: (Error | DOMException | object | string)[]
-): void {
-this.originalFunctions[fnName](...args);
-}
 public log(
 level: string,
 ...args: (Error | DOMException | object | string)[]
@@ -137,23 +121,27 @@ export class ConsoleLogger {
 // Using + really is the quickest way in JS
 // http://jsperf.com/concat-vs-plus-vs-join
 this.logs += line;
+this.emit(ConsoleLoggerEvent.Log);
 }
 /**
-* Retrieve log lines to flush to disk.
-* @param {boolean} keepLogs True to not delete logs after flushing.
-* @return {string} \n delimited log lines to flush.
+* Returns the log lines to flush to disk and empties the internal log buffer
+* @return {string} \n delimited log lines
 */
-public flush(keepLogs?: boolean): string {
-// The ConsoleLogger doesn't care how these end up on disk, it just
-// flushes them to the caller.
-if (keepLogs) {
-return this.logs;
-}
+public popLogs(): string {
 const logsToFlush = this.logs;
 this.logs = "";
 return logsToFlush;
 }
+/**
+* Returns lines currently in the log buffer without removing them
+* @return {string} \n delimited log lines
+*/
+public peekLogs(): string {
+return this.logs;
+}
 }
 // A class which stores log lines in an IndexedDB instance.
@@ -164,8 +152,14 @@ export class IndexedDBLogStore {
 private flushAgainPromise: Promise<void> = null;
 private id: string;
-constructor(private indexedDB: IDBFactory, private logger: ConsoleLogger) {
+constructor(
+private indexedDB: IDBFactory,
+private loggerInstance: ConsoleLogger
+) {
 this.id = "instance-" + randomString(16);
+loggerInstance.on(ConsoleLoggerEvent.Log, this.onLoggerLog);
+window.addEventListener("beforeunload", this.flush);
 }
 /**
@@ -174,30 +168,31 @@
 public connect(): Promise<void> {
 const req = this.indexedDB.open("logs");
 return new Promise((resolve, reject) => {
-req.onsuccess = (event: Event) => {
-// @ts-ignore
-this.db = event.target.result;
-// Periodically flush logs to local storage / indexeddb
-setInterval(this.flush.bind(this), FLUSH_RATE_MS);
+req.onsuccess = () => {
+this.db = req.result;
 resolve();
 };
-req.onerror = (event) => {
-const err =
-// @ts-ignore
-"Failed to open log database: " + event.target.error.name;
+req.onerror = () => {
+const err = "Failed to open log database: " + req.error.name;
 logger.error(err);
 reject(new Error(err));
 };
 // First time: Setup the object store
-req.onupgradeneeded = (event) => {
-// @ts-ignore
-const db = event.target.result;
+req.onupgradeneeded = () => {
+const db = req.result;
+// This is the log entries themselves. Each entry is a chunk of
+// logs (ie multiple lines). 'id' is the instance ID (so logs with
+// the same instance ID are all from the same session) and 'index'
+// is a sequence number for the chunk. The log lines live in the
+// 'lines' key, which is a chunk of many newline-separated log lines.
 const logObjStore = db.createObjectStore("logs", {
 keyPath: ["id", "index"],
 });
 // Keys in the database look like: [ "instance-148938490", 0 ]
+// (The instance ID plus the ID of each log chunk).
 // Later on we need to query everything based on an instance id.
 // In order to do this, we need to set up indexes "id".
 logObjStore.createIndex("id", "id", { unique: false });
@@ -206,6 +201,9 @@ export class IndexedDBLogStore {
 this.generateLogEntry(new Date() + " ::: Log database was created.")
 );
+// This records the last time each instance ID generated a log message, such
+// that the logs from each session can be collated in the order they last logged
+// something.
 const lastModifiedStore = db.createObjectStore("logslastmod", {
 keyPath: "id",
 });
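The comments added in this hunk and the previous one spell out the IndexedDB layout: a "logs" object store keyed by ["id", "index"] whose records hold a chunk of newline-separated lines, with a non-unique "id" index for querying one session's chunks, and a "logslastmod" store keyed by "id" that records when each instance last wrote. Roughly, the record shapes implied by those comments look like the following sketch (the field name for the last-modified timestamp is an assumption, not taken from the diff):

// Illustrative record shapes inferred from the schema comments above.
interface LogChunk {
    id: string;    // "instance-<random>": identifies one app session
    index: number; // sequence number of this chunk within the session
    lines: string; // many newline-separated log lines
}

interface LogLastMod {
    id: string; // same instance ID as above
    ts: number; // assumed field: when this instance last wrote a chunk
}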
@@ -214,6 +212,26 @@
 });
 }
+private onLoggerLog = () => {
+if (!this.db) return;
+this.throttledFlush();
+};
+// Throttled function to flush logs. We use throttle rather
+// than debounce as we want logs to be written regularly, otherwise
+// if there's a constant stream of logging, we'd never write anything.
+private throttledFlush = throttle(
+() => {
+this.flush();
+},
+MAX_FLUSH_INTERVAL_MS,
+{
+leading: false,
+trailing: true,
+}
+);
 /**
 * Flush logs to disk.
 *
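The comment in the hunk above gives the reasoning for throttle over debounce; concretely, with leading: false and trailing: true, a burst of log events collapses into a single flush at the end of the 2-second window, and a continuous stream of logging still produces one flush per window rather than being postponed indefinitely. A tiny standalone illustration of that timing (not element-call code):

import { throttle } from "lodash";

// With these options the callback runs at most once per window, on the
// trailing edge, no matter how often the throttled function is called.
const flush = () => console.log("flush at", Date.now());
const throttledFlush = throttle(flush, 2000, { leading: false, trailing: true });

// A burst of 100 calls results in one flush roughly 2s after the first call;
// a steady stream of calls results in one flush every ~2s.
for (let i = 0; i < 100; i++) throttledFlush();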
@@ -233,7 +251,7 @@
 *
 * @return {Promise} Resolved when the logs have been flushed.
 */
-public flush(): Promise<void> {
+public flush = (): Promise<void> => {
 // check if a flush() operation is ongoing
 if (this.flushPromise) {
 if (this.flushAgainPromise) {
@@ -258,20 +276,19 @@
 reject(new Error("No connected database"));
 return;
 }
-const lines = this.logger.flush();
+const lines = this.loggerInstance.popLogs();
 if (lines.length === 0) {
 resolve();
 return;
 }
 const txn = this.db.transaction(["logs", "logslastmod"], "readwrite");
 const objStore = txn.objectStore("logs");
-txn.oncomplete = (event) => {
+txn.oncomplete = () => {
 resolve();
 };
 txn.onerror = (event) => {
 logger.error("Failed to flush logs : ", event);
-// @ts-ignore
-reject(new Error("Failed to write logs: " + event.target.errorCode));
+reject(new Error("Failed to write logs: " + txn.error.message));
 };
 objStore.add(this.generateLogEntry(lines));
 const lastModStore = txn.objectStore("logslastmod");
@@ -280,7 +297,7 @@
 this.flushPromise = null;
 });
 return this.flushPromise;
-}
+};
 /**
 * Consume the most recent logs and return them. Older logs which are not
@@ -307,13 +324,11 @@
 .index("id")
 .openCursor(IDBKeyRange.only(id), "prev");
 let lines = "";
-query.onerror = (event) => {
-// @ts-ignore
-reject(new Error("Query failed: " + event.target.errorCode));
+query.onerror = () => {
+reject(new Error("Query failed: " + query.error.message));
 };
-query.onsuccess = (event) => {
-// @ts-ignore
-const cursor = event.target.result;
+query.onsuccess = () => {
+const cursor = query.result;
 if (!cursor) {
 resolve(lines);
 return; // end of results
@@ -355,9 +370,8 @@
 const o = txn.objectStore("logs");
 // only load the key path, not the data which may be huge
 const query = o.index("id").openKeyCursor(IDBKeyRange.only(id));
-query.onsuccess = (event) => {
-// @ts-ignore
-const cursor = event.target.result;
+query.onsuccess = () => {
+const cursor = query.result;
 if (!cursor) {
 return;
 }
@@ -367,12 +381,10 @@
 txn.oncomplete = () => {
 resolve();
 };
-txn.onerror = (event) => {
+txn.onerror = () => {
 reject(
 new Error(
-"Failed to delete logs for " +
-// @ts-ignore
-`'${id}' : ${event.target.errorCode}`
+"Failed to delete logs for " + `'${id}' : ${txn.error.message}`
 )
 );
 };
@@ -456,14 +468,12 @@
 const query = store.openCursor(keyRange);
 return new Promise((resolve, reject) => {
 const results = [];
-query.onerror = (event) => {
-// @ts-ignore
-reject(new Error("Query failed: " + event.target.errorCode));
+query.onerror = () => {
+reject(new Error("Query failed: " + query.error.message));
 };
 // collect results
-query.onsuccess = (event) => {
-// @ts-ignore
-const cursor = event.target.result;
+query.onsuccess = () => {
+const cursor = query.result;
 if (!cursor) {
 resolve(results);
 return; // end of results
@@ -479,8 +489,6 @@ declare global {
 // eslint-disable-next-line no-var, camelcase
 var mx_rage_logger: ConsoleLogger;
 // eslint-disable-next-line no-var, camelcase
-var mx_rage_initPromise: Promise<void>;
-// eslint-disable-next-line no-var, camelcase
 var mx_rage_initStoragePromise: Promise<void>;
 }
@@ -491,19 +499,11 @@
 * be set up immediately for the logs.
 * @return {Promise} Resolves when set up.
 */
-export function init(setUpPersistence = true): Promise<void> {
-if (global.mx_rage_initPromise) {
-return global.mx_rage_initPromise;
-}
+export function init(): Promise<void> {
 global.mx_rage_logger = new ConsoleLogger();
 global.mx_rage_logger.monkeyPatch(window.console);
-if (setUpPersistence) {
-return tryInitStorage();
-}
-global.mx_rage_initPromise = Promise.resolve();
-return global.mx_rage_initPromise;
+return tryInitStorage();
 }
 /**
@@ -573,7 +573,7 @@ export async function getLogsForReport(): Promise<LogEntry[]> {
 } else {
 return [
 {
-lines: global.mx_rage_logger.flush(true),
+lines: global.mx_rage_logger.peekLogs(),
 id: "-",
 },
 ];