Skip to content

Commit

Permalink
fix(serverless-api/logs): keeps the most recent logs in log cache
Browse files Browse the repository at this point in the history
Another note: I discovered while testing this that _read() is called multiple times, which led to extra API calls via _poll(). This was because when _read() was called with an interval already set, the code would still make a one-off call. Now, if an interval is already set, no extra polls are made.
  • Loading branch information
philnash authored and dkundel committed Oct 29, 2020
1 parent e8c02dc commit 0f3b317
Showing 1 changed file with 37 additions and 11 deletions.
48 changes: 37 additions & 11 deletions packages/serverless-api/src/streams/logs.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ export class LogsStream extends Readable {
private _pollingCacheSize: number;
private _interval: NodeJS.Timeout | undefined;
private _viewedSids: Set<Sid>;
private _viewedLogs: Array<{ sid: Sid; dateCreated: Date }>;

constructor(
private environmentSid: Sid,
Expand All @@ -19,6 +20,7 @@ export class LogsStream extends Readable {
super({ objectMode: true });
this._interval = undefined;
this._viewedSids = new Set();
this._viewedLogs = [];
this._pollingFrequency = config.pollingFrequency || 1000;
this._pollingCacheSize = config.logCacheSize || 1000;
}
Expand Down Expand Up @@ -54,13 +56,35 @@ export class LogsStream extends Readable {
// The logs endpoint is not reliably returning logs in the same order
// Therefore we need to keep a set of all previously seen log entries
// In order to avoid memory leaks we cap the total size of logs at 1000
// If the new set is larger we'll instead only use the SIDs from the current
// request.
if (logs.length + this._viewedSids.size <= this._pollingCacheSize) {
logs.map(log => log.sid).forEach(sid => this._viewedSids.add(sid));
} else {
this._viewedSids = new Set(logs.map(log => log.sid));
}
// (or the set pollingCacheSize).
//
// We store an array of the logs' SIDs and created dates.
// Then when a new page of logs is added, we find the unique logs, sort by
// date created, newest to oldest, and chop off the end of the array (the
// oldest logs) leaving the most recent logs in memory. We then turn that
// into a set of SIDs to check against next time.

// Creates a unique set of log sids and date created from previous logs
// and new logs by stringifying the sid and the date together.
const viewedLogsSet = new Set([
...this._viewedLogs.map(
log => `${log.sid}-${log.dateCreated.toISOString()}`
),
...logs.map(log => `${log.sid}-${log.date_created}`),
]);
// Then we take that set, map over the logs and split them up into sid and
// date again, sort them most to least recent and chop off the oldest if
// they are beyond the polling cache size.
this._viewedLogs = [...viewedLogsSet]
.map(logString => {
const [sid, dateCreated] = logString.split('-');
return { sid, dateCreated: new Date(dateCreated) };
})
.sort((a, b) => b.dateCreated.valueOf() - a.dateCreated.valueOf())
.slice(0, this._pollingCacheSize);
// Finally we create a set of just SIDs to compare against.
this._viewedSids = new Set(this._viewedLogs.map(log => log.sid));

if (!this.config.tail) {
this.push(null);
}
Expand All @@ -70,10 +94,12 @@ export class LogsStream extends Readable {
}

_read() {
if (this.config.tail && !this._interval) {
this._interval = setInterval(() => {
this._poll();
}, this._pollingFrequency);
if (this.config.tail) {
if (!this._interval) {
this._interval = setInterval(() => {
this._poll();
}, this._pollingFrequency);
}
} else {
this._poll();
}
Expand Down

0 comments on commit 0f3b317

Please sign in to comment.