/**
 * Log Digest Module
 * Collects container logs hourly, generates daily summaries.
 * Gives users a single place to see what happened across all services
 * and guidance on where to look for more detail.
 */

const Docker = require('dockerode');
const EventEmitter = require('events');
const fs = require('fs');
const fsp = require('fs').promises;
const path = require('path');
const { DOCKER } = require('./constants');

const docker = new Docker();

const ERROR_PATTERNS = [
  /\berror\b/i, /\bfailed\b/i, /\bfatal\b/i, /\bpanic\b/i,
  /\bcrash(ed)?\b/i, /\bexception\b/i, /\btimeout\b/i,
  /\bOOM\b/, /\bout of memory\b/i, /\bkilled\b/i,
  /\bdenied\b/i, /\bunauthorized\b/i, /\brefused\b/i,
];

const WARNING_PATTERNS = [
  /\bwarn(ing)?\b/i, /\bdeprecated\b/i, /\bretry(ing)?\b/i,
  /\bslow\b/i, /\blatency\b/i,
];

const EVENT_PATTERNS = [
  { pattern: /\b(start(ed|ing)?|boot(ed|ing)?|init(ializ(ed|ing))?)\b/i, type: 'startup' },
  { pattern: /\b(stop(ped|ping)?|shutdown|exit(ed|ing)?|terminat(ed|ing)?)\b/i, type: 'shutdown' },
  { pattern: /\b(restart(ed|ing)?|reload(ed|ing)?)\b/i, type: 'restart' },
  { pattern: /\bhealth.?check.*(fail|unhealthy)\b/i, type: 'health_failure' },
  { pattern: /\b(update|upgrade|migration)\b/i, type: 'update' },
];
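
// Classification examples (illustrative lines, not from any real service).
// Note the precedence in _collectHourlyLogs below: error patterns are checked
// first, then warnings, and only the remaining lines are scanned for events.
//   "ERROR: connection refused"       -> error   (matches /\berror\b/i)
//   "retrying upload (attempt 2/5)"   -> warning (matches /\bretry(ing)?\b/i)
//   "healthcheck reported unhealthy"  -> event { type: 'health_failure' }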

class LogDigest extends EventEmitter {
  constructor() {
    super();
    this.collectInterval = null;
    this.digestTimeout = null;
    this.initialTimeout = null; // One-shot timer for the initial collection
    this.running = false;
    this.hourlySummaries = []; // Ring buffer of hourly snapshots
    this.digestDir = null; // Set during start()
    this.lastCollect = null;
    this._lastCollectTimestamp = {}; // Per-container: last log timestamp fetched (not yet used)
  }

  /**
   * Start the log digest system.
   * @param {string} digestDir - Directory to write daily digest files
   */
  start(digestDir) {
    if (this.running) return;
    this.running = true;
    this.digestDir = digestDir;

    // Ensure digest directory exists
    if (!fs.existsSync(digestDir)) {
      fs.mkdirSync(digestDir, { recursive: true });
    }

    // Collect logs every hour
    this.collectInterval = setInterval(() => {
      this._collectHourlyLogs().catch(e =>
        console.error('[LogDigest] Hourly collection failed:', e.message),
      );
    }, DOCKER.DIGEST.COLLECT_INTERVAL);

    // Schedule daily digest generation
    this._scheduleDailyDigest();

    // Run an initial collection after 2 minutes (tracked so stop() can cancel it)
    this.initialTimeout = setTimeout(() => {
      if (this.running) {
        this._collectHourlyLogs().catch(() => {});
      }
    }, 2 * 60 * 1000);
  }

  stop() {
    if (!this.running) return;
    this.running = false;
    if (this.collectInterval) {
      clearInterval(this.collectInterval);
      this.collectInterval = null;
    }
    if (this.digestTimeout) {
      clearTimeout(this.digestTimeout);
      this.digestTimeout = null;
    }
    if (this.initialTimeout) {
      clearTimeout(this.initialTimeout);
      this.initialTimeout = null;
    }
  }

  /**
   * Collect logs from all managed containers for the last hour.
   */
  async _collectHourlyLogs() {
    const now = new Date();
    const sinceTimestamp = Math.floor((now.getTime() - DOCKER.DIGEST.COLLECT_INTERVAL) / 1000);
    // Build the hour key from UTC parts so it matches the UTC date strings
    // (toISOString) used in generateDailyDigest() and getLiveData()
    const hourKey = `${now.getUTCFullYear()}-${String(now.getUTCMonth() + 1).padStart(2, '0')}-${String(now.getUTCDate()).padStart(2, '0')}T${String(now.getUTCHours()).padStart(2, '0')}:00`;

    const hourSummary = {
      hour: hourKey,
      timestamp: now.toISOString(),
      services: {},
    };

    try {
      const containers = await docker.listContainers({ all: true });
      const managed = containers.filter(c => c.Labels?.['sami.managed'] === 'true');

      for (const containerInfo of managed) {
        const name = containerInfo.Names[0]?.replace(/^\//, '') || containerInfo.Id.slice(0, 12);
        const appId = containerInfo.Labels['sami.app'] || name;
        const isRunning = containerInfo.State === 'running';

        const serviceSummary = {
          name,
          appId,
          state: containerInfo.State,
          errors: [],
          warnings: [],
          events: [],
          errorCount: 0,
          warningCount: 0,
          totalLines: 0,
        };

        if (isRunning) {
          try {
            const container = docker.getContainer(containerInfo.Id);
            const logBuffer = await container.logs({
              stdout: true,
              stderr: true,
              since: sinceTimestamp,
              tail: DOCKER.DIGEST.LOG_TAIL,
              timestamps: true,
            });

            const lines = this._parseDockerLogs(logBuffer);
            serviceSummary.totalLines = lines.length;

            for (const line of lines) {
              // Check for errors
              if (line.stream === 'stderr' || ERROR_PATTERNS.some(p => p.test(line.text))) {
                serviceSummary.errorCount++;
                if (serviceSummary.errors.length < 10) {
                  serviceSummary.errors.push({
                    time: line.timestamp || hourKey,
                    text: line.text.slice(0, 500),
                  });
                }
                continue;
              }

              // Check for warnings
              if (WARNING_PATTERNS.some(p => p.test(line.text))) {
                serviceSummary.warningCount++;
                if (serviceSummary.warnings.length < 5) {
                  serviceSummary.warnings.push({
                    time: line.timestamp || hourKey,
                    text: line.text.slice(0, 300),
                  });
                }
                continue;
              }

              // Check for notable events
              for (const { pattern, type } of EVENT_PATTERNS) {
                if (pattern.test(line.text)) {
                  serviceSummary.events.push({
                    type,
                    time: line.timestamp || hourKey,
                    text: line.text.slice(0, 300),
                  });
                  break;
                }
              }
            }
          } catch (logErr) {
            serviceSummary.errors.push({
              time: now.toISOString(),
              text: `Failed to fetch logs: ${logErr.message}`,
            });
            serviceSummary.errorCount++;
          }
        } else {
          serviceSummary.events.push({
            type: 'not_running',
            time: now.toISOString(),
            text: `Container is ${containerInfo.State}`,
          });
        }

        hourSummary.services[appId] = serviceSummary;
      }
    } catch (e) {
      console.error('[LogDigest] Container enumeration failed:', e.message);
    }

    // Add to ring buffer
    this.hourlySummaries.push(hourSummary);
    if (this.hourlySummaries.length > DOCKER.DIGEST.MAX_HOURLY_ENTRIES) {
      this.hourlySummaries.shift();
    }

    this.lastCollect = now.toISOString();
    this.emit('hourly-collected', hourSummary);
    return hourSummary;
  }
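
  // Shape of one hourly summary, as built above (field values illustrative):
  //   {
  //     hour: '2026-03-13T12:00',
  //     timestamp: '2026-03-13T12:58:01.000Z',
  //     services: {
  //       myapp: { name, appId, state, errors, warnings, events,
  //                errorCount, warningCount, totalLines },
  //     },
  //   }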

  /**
   * Parse Docker's multiplexed log stream into lines.
   * Each frame starts with an 8-byte header: byte 0 is the stream type
   * (1 = stdout, 2 = stderr) and bytes 4-7 are the payload size (big-endian).
   * Note: containers started with a TTY return a raw stream without this
   * framing; this parser assumes TTY is disabled.
   */
  _parseDockerLogs(logData) {
    const lines = [];
    const buffer = Buffer.isBuffer(logData) ? logData : Buffer.from(logData);
    let offset = 0;

    while (offset < buffer.length) {
      if (offset + 8 > buffer.length) break; // Incomplete header; stop
      const streamType = buffer[offset];
      const size = buffer.readUInt32BE(offset + 4);
      if (offset + 8 + size > buffer.length) break; // Incomplete frame; stop

      const text = buffer.slice(offset + 8, offset + 8 + size).toString('utf8').trim();
      if (text) {
        // Try to extract timestamp from Docker's format: "2026-03-13T12:00:00.000000000Z message"
        let timestamp = null;
        let message = text;
        const tsMatch = text.match(/^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})\.\d+Z\s(.*)$/s);
        if (tsMatch) {
          timestamp = tsMatch[1];
          message = tsMatch[2];
        }

        lines.push({
          stream: streamType === 2 ? 'stderr' : 'stdout',
          text: message,
          timestamp,
        });
      }
      offset += 8 + size;
    }
    return lines;
  }
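
  // Worked example (hypothetical frame): the 8-byte header
  //   [0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05]
  // announces a 5-byte stderr payload, so a frame carrying "oops\n" parses to
  //   { stream: 'stderr', text: 'oops', timestamp: null }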

  /**
   * Schedule the daily digest at the configured hour.
   */
  _scheduleDailyDigest() {
    const now = new Date();
    const targetHour = DOCKER.DIGEST.DIGEST_HOUR;
    const next = new Date(now);
    next.setHours(targetHour, 5, 0, 0); // 5 minutes past the hour
    if (next <= now) next.setDate(next.getDate() + 1);

    const delay = next.getTime() - now.getTime();
    this.digestTimeout = setTimeout(() => {
      this.generateDailyDigest().catch(e =>
        console.error('[LogDigest] Daily digest generation failed:', e.message),
      );
      // Reschedule for tomorrow
      if (this.running) this._scheduleDailyDigest();
    }, delay);
  }
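
  // Example: with DIGEST_HOUR = 6 (a value assumed for illustration) and the
  // clock at 14:30 local time, the timeout fires tomorrow at 06:05; at 05:50
  // it would fire the same day at 06:05.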

  /**
   * Generate the daily digest from accumulated hourly summaries.
   * Can also be called on demand.
   */
  async generateDailyDigest(dateStr) {
    // Default to yesterday (UTC), matching the UTC hour keys
    const date = dateStr || new Date(Date.now() - 86400000).toISOString().slice(0, 10);
    const relevantHours = this.hourlySummaries.filter(h => h.hour.startsWith(date));

    // Aggregate per-service stats across all hours
    const serviceAgg = {};
    const notableEvents = [];

    for (const hour of relevantHours) {
      for (const [appId, svc] of Object.entries(hour.services)) {
        if (!serviceAgg[appId]) {
          serviceAgg[appId] = {
            name: svc.name,
            appId,
            totalErrors: 0,
            totalWarnings: 0,
            totalLines: 0,
            lastState: svc.state,
            topErrors: [],
            events: [],
          };
        }
        const agg = serviceAgg[appId];
        agg.totalErrors += svc.errorCount;
        agg.totalWarnings += svc.warningCount;
        agg.totalLines += svc.totalLines;
        agg.lastState = svc.state;

        // Keep up to 5 sample errors per service (first seen wins; no true de-duplication)
        for (const err of svc.errors) {
          if (agg.topErrors.length < 5) {
            agg.topErrors.push(err);
          }
        }

        // Collect notable events
        for (const evt of svc.events) {
          notableEvents.push({ ...evt, service: svc.name, appId });
        }
      }
    }

    // Get Docker disk usage
    let diskUsage = null;
    try {
      const dockerMaintenance = require('./docker-maintenance');
      diskUsage = await dockerMaintenance.getDiskUsage();
    } catch (e) {
      // Module may not be loaded yet
    }

    // Build digest object
    const digest = {
      date,
      generatedAt: new Date().toISOString(),
      hoursCollected: relevantHours.length,
      services: serviceAgg,
      notableEvents: notableEvents.sort((a, b) => (a.time || '').localeCompare(b.time || '')),
      diskUsage,
      summary: {
        totalServices: Object.keys(serviceAgg).length,
        servicesWithErrors: Object.values(serviceAgg).filter(s => s.totalErrors > 0).length,
        totalErrors: Object.values(serviceAgg).reduce((sum, s) => sum + s.totalErrors, 0),
        totalWarnings: Object.values(serviceAgg).reduce((sum, s) => sum + s.totalWarnings, 0),
      },
    };

    // Write formatted digest file
    const formatted = this._formatDigest(digest);
    const filename = `digest-${date}.log`;
    const filepath = path.join(this.digestDir, filename);
    await fsp.writeFile(filepath, formatted, 'utf8');

    // Also write JSON for API consumption
    const jsonPath = path.join(this.digestDir, `digest-${date}.json`);
    await fsp.writeFile(jsonPath, JSON.stringify(digest, null, 2), 'utf8');

    // Cleanup old digests
    await this._cleanupOldDigests();

    this.emit('digest-generated', { date, filepath, digest });
    return digest;
  }
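
  // On-demand sketch (assumes start() already ran so digestDir is set;
  // the date below is illustrative):
  //   const digest = await logDigest.generateDailyDigest('2026-03-12');
  //   console.log(digest.summary); // { totalServices, servicesWithErrors, ... }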

  /**
   * Format digest into human-readable text.
   */
  _formatDigest(digest) {
    const lines = [];
    const hr = '='.repeat(55);
    const sr = '-'.repeat(55);

    lines.push(hr);
    lines.push(' DashCaddy Daily Log Digest');
    lines.push(` ${digest.date}`);
    lines.push(` Generated: ${digest.generatedAt}`);
    lines.push(hr);
    lines.push('');

    // Service summary table
    lines.push(`-- Service Summary ${'-'.repeat(36)}`);
    const services = Object.values(digest.services);
    if (services.length === 0) {
      lines.push(' No managed services found.');
    } else {
      for (const svc of services) {
        const stateIcon = svc.lastState === 'running' ? 'OK' : '!!';
        const errStr = `${svc.totalErrors} error${svc.totalErrors !== 1 ? 's' : ''}`;
        const warnStr = `${svc.totalWarnings} warning${svc.totalWarnings !== 1 ? 's' : ''}`;
        const flag = svc.totalErrors > 0 ? ' <-- investigate' : '';
        lines.push(` ${svc.name.padEnd(18)} ${stateIcon.padEnd(10)} ${errStr.padEnd(14)} ${warnStr}${flag}`);
      }
    }
    lines.push('');

    // Notable events
    const events = digest.notableEvents;
    if (events.length > 0) {
      lines.push(`-- Notable Events ${'-'.repeat(37)}`);
      for (const evt of events) {
        const time = (evt.time || '').slice(11, 16) || '??:??';
        lines.push(` [${time}] ${evt.service}: ${evt.text.slice(0, 80)}`);
        // Add guidance on where to look further
        const containerName = `${DOCKER.CONTAINER_PREFIX}${evt.appId}`;
        if (evt.type === 'health_failure' || evt.type === 'restart') {
          const sinceDate = `${digest.date}T${(evt.time || '').slice(11, 13)}:00:00`;
          lines.push(` See: docker logs ${containerName} --since ${sinceDate}`);
        }
      }
      lines.push('');
    }

    // Top errors per service
    const errServices = services.filter(s => s.totalErrors > 0);
    if (errServices.length > 0) {
      lines.push(`-- Error Details ${'-'.repeat(38)}`);
      for (const svc of errServices) {
        lines.push(` ${svc.name} (${svc.totalErrors} error${svc.totalErrors !== 1 ? 's' : ''}):`);
        for (const err of svc.topErrors) {
          const time = (err.time || '').slice(11, 16) || '??:??';
          lines.push(` [${time}] ${err.text.slice(0, 100)}`);
        }
        const containerName = `${DOCKER.CONTAINER_PREFIX}${svc.appId}`;
        lines.push(` Full logs: docker logs ${containerName} --since ${digest.date}T00:00:00`);
        lines.push('');
      }
    }

    // Docker disk usage
    if (digest.diskUsage) {
      lines.push(`-- Docker Disk Usage ${'-'.repeat(34)}`);
      const du = digest.diskUsage;
      lines.push(` Images: ${formatBytes(du.images.sizeBytes)} (${du.images.count} images)`);
      lines.push(` Containers: ${formatBytes(du.containers.sizeBytes)}`);
      lines.push(` Volumes: ${formatBytes(du.volumes.sizeBytes)} (${du.volumes.count} volumes)`);
      lines.push(` Build Cache: ${formatBytes(du.buildCache.sizeBytes)}`);
      lines.push(` Total: ${du.totalGB} GB`);
      if (du.totalGB > DOCKER.MAINTENANCE.DISK_WARN_GB) {
        lines.push(` WARNING: Exceeds ${DOCKER.MAINTENANCE.DISK_WARN_GB}GB threshold!`);
        lines.push(' Run: docker system prune -a (removes unused images/cache)');
      }
      lines.push('');
    }

    // Summary
    lines.push(sr);
    lines.push(` ${digest.summary.totalServices} service(s) monitored | ${digest.summary.totalErrors} error(s) | ${digest.summary.totalWarnings} warning(s)`);
    lines.push(` Hours collected: ${digest.hoursCollected}/24`);
    lines.push(hr);

    return `${lines.join('\n')}\n`;
  }
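
  // Abbreviated sample of the rendered text (all values invented; the
  // container-name prefix comes from DOCKER.CONTAINER_PREFIX):
  //   =======================================================
  //    DashCaddy Daily Log Digest
  //    2026-03-12
  //   -- Service Summary ------------------------------------
  //    myapp              OK         3 errors       1 warning <-- investigate
  //   -- Error Details --------------------------------------
  //    myapp (3 errors):
  //    [08:14] ERROR: connection refused
  //    Full logs: docker logs <prefix>myapp --since 2026-03-12T00:00:00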

  /**
   * Keep only the newest MAX_DIGEST_FILES days of digests
   * (each day produces a .log and a .json file).
   */
  async _cleanupOldDigests() {
    if (!this.digestDir) return;
    try {
      const files = await fsp.readdir(this.digestDir);
      const digestFiles = files.filter(f => f.startsWith('digest-')).sort();
      // Each date has .log + .json = 2 files per day
      const maxFiles = DOCKER.DIGEST.MAX_DIGEST_FILES * 2;
      if (digestFiles.length > maxFiles) {
        const toDelete = digestFiles.slice(0, digestFiles.length - maxFiles);
        for (const f of toDelete) {
          await fsp.unlink(path.join(this.digestDir, f)).catch(() => {});
        }
      }
    } catch (e) {
      // Directory may not exist yet
    }
  }

  /**
   * Get the latest daily digest (JSON).
   */
  async getLatestDigest() {
    if (!this.digestDir) return null;
    try {
      const files = await fsp.readdir(this.digestDir);
      const jsonFiles = files.filter(f => f.endsWith('.json')).sort();
      if (jsonFiles.length === 0) return null;
      const latest = path.join(this.digestDir, jsonFiles[jsonFiles.length - 1]);
      return JSON.parse(await fsp.readFile(latest, 'utf8'));
    } catch (e) {
      return null;
    }
  }

  /**
   * Get the digest for a specific date.
   */
  async getDigestByDate(dateStr) {
    if (!this.digestDir) return null;
    const jsonPath = path.join(this.digestDir, `digest-${dateStr}.json`);
    try {
      return JSON.parse(await fsp.readFile(jsonPath, 'utf8'));
    } catch (e) {
      return null;
    }
  }

  /**
   * Get the formatted text version of a digest.
   */
  async getDigestText(dateStr) {
    if (!this.digestDir) return null;
    const logPath = path.join(this.digestDir, `digest-${dateStr}.log`);
    try {
      return await fsp.readFile(logPath, 'utf8');
    } catch (e) {
      return null;
    }
  }

  /**
   * List available digest dates, newest first.
   */
  async listDigests() {
    if (!this.digestDir) return [];
    try {
      const files = await fsp.readdir(this.digestDir);
      return files
        .filter(f => f.endsWith('.json'))
        .map(f => f.replace('digest-', '').replace('.json', ''))
        .sort()
        .reverse();
    } catch (e) {
      return [];
    }
  }
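
  // Retrieval sketch (dates illustrative):
  //   const dates = await logDigest.listDigests(); // ['2026-03-12', '2026-03-11', ...]
  //   const text = await logDigest.getDigestText(dates[0]);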

  /**
   * Get live data: the current day's accumulated hourly summaries.
   */
  getLiveData() {
    const today = new Date().toISOString().slice(0, 10);
    const todayHours = this.hourlySummaries.filter(h => h.hour.startsWith(today));

    // Aggregate
    const serviceAgg = {};
    for (const hour of todayHours) {
      for (const [appId, svc] of Object.entries(hour.services)) {
        if (!serviceAgg[appId]) {
          serviceAgg[appId] = { name: svc.name, appId, totalErrors: 0, totalWarnings: 0, lastState: svc.state, recentErrors: [] };
        }
        serviceAgg[appId].totalErrors += svc.errorCount;
        serviceAgg[appId].totalWarnings += svc.warningCount;
        serviceAgg[appId].lastState = svc.state;
        for (const err of svc.errors) {
          if (serviceAgg[appId].recentErrors.length < 10) {
            serviceAgg[appId].recentErrors.push(err);
          }
        }
      }
    }

    return {
      date: today,
      hoursCollected: todayHours.length,
      lastCollect: this.lastCollect,
      services: serviceAgg,
    };
  }

  getStatus() {
    return {
      running: this.running,
      lastCollect: this.lastCollect,
      hourlySummaries: this.hourlySummaries.length,
      digestDir: this.digestDir,
    };
  }
}

function formatBytes(bytes) {
  // Guard against zero, negative, and non-numeric inputs, which Math.log can't handle
  if (!bytes || bytes <= 0) return '0 B';
  const units = ['B', 'KB', 'MB', 'GB', 'TB'];
  // Clamp the index so sub-byte or beyond-TB values still map to a valid unit
  const i = Math.min(units.length - 1, Math.max(0, Math.floor(Math.log(bytes) / Math.log(1024))));
  return `${(bytes / Math.pow(1024, i)).toFixed(1)} ${units[i]}`;
}

module.exports = new LogDigest();
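
// Manual smoke test: a sketch for local development only, guarded so it runs
// only when this file is executed directly with `node`. The temp directory is
// an arbitrary choice, not something the module requires.
if (require.main === module) {
  const os = require('os');
  const dir = path.join(os.tmpdir(), 'log-digest-demo');
  module.exports.start(dir);
  module.exports._collectHourlyLogs()
    .then(() => module.exports.generateDailyDigest(new Date().toISOString().slice(0, 10)))
    .then(digest => {
      console.log('Summary:', digest.summary);
      module.exports.stop(); // Clear timers so the process can exit
    })
    .catch(err => {
      console.error('Smoke test failed:', err.message);
      module.exports.stop();
    });
}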