Merge pull request #3523 from matrix-org/dbkr/rageshake_limit_once

Only limit the rageshake log size in one place
pull/21833/head
David Baker 2019-10-07 12:06:30 +01:00 committed by GitHub
commit 7d96517139
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 16 additions and 12 deletions

View File

@@ -254,7 +254,8 @@ class IndexedDBLogStore {
        const db = this.db;
        // Returns: a string representing the concatenated logs for this ID.
-       function fetchLogs(id) {
+       // Stops adding log fragments when the size exceeds maxSize
+       function fetchLogs(id, maxSize) {
            const objectStore = db.transaction("logs", "readonly").objectStore("logs");
            return new Promise((resolve, reject) => {
@@ -269,10 +270,10 @@ class IndexedDBLogStore {
                    resolve(lines);
                    return; // end of results
                }
-               if (lines.length + cursor.value.lines.length >= MAX_LOG_SIZE && lines.length > 0) {
+               lines = cursor.value.lines + lines;
+               if (lines.length >= maxSize) {
                    resolve(lines);
                } else {
-                   lines = cursor.value.lines + lines;
                    cursor.continue();
                }
            };
@@ -336,22 +337,25 @@ class IndexedDBLogStore {
        const logs = [];
        let size = 0;
        for (let i = 0; i < allLogIds.length; i++) {
-           const lines = await fetchLogs(allLogIds[i]);
-           // always include at least one log file, but only include
-           // subsequent ones if they won't take us over the MAX_LOG_SIZE
-           if (i > 0 && size + lines.length > MAX_LOG_SIZE) {
+           const lines = await fetchLogs(allLogIds[i], MAX_LOG_SIZE - size);
+           // always add the log file: fetchLogs will truncate once the maxSize we give it is
+           // exceeded, so we'll go over the max but only by one fragment's worth.
+           logs.push({
+               lines: lines,
+               id: allLogIds[i],
+           });
+           size += lines.length;
+           // If fetchLogs truncated we'll now be at or over the size limit,
+           // in which case we should stop and remove the rest of the log files.
+           if (size >= MAX_LOG_SIZE) {
                // the remaining log IDs should be removed. If we go out of
                // bounds this is just []
                removeLogIds = allLogIds.slice(i + 1);
                break;
            }
-           logs.push({
-               lines: lines,
-               id: allLogIds[i],
-           });
-           size += lines.length;
        }
        if (removeLogIds.length > 0) {
            console.log("Removing logs: ", removeLogIds);