Files
dashcaddy/dashcaddy-api/routes/logs.js
Sami e615f24627 Add Docker hygiene, deployment manifests, and daily log digest
Prevents Docker disk bloat by adding log rotation (10MB max, 3 files)
to all container creation and update paths, auto-pruning dangling
images after deploy/remove/update, and a daily maintenance module
that cleans build cache and warns on disk thresholds.

Saves a deployment manifest in services.json at deploy time so users
can restore all their apps after a Docker purge. Adds restore-all
and restore-single endpoints that recreate containers, Caddy config,
and DNS records from the saved manifests.

Adds an hourly log collector and daily digest generator that
summarizes errors, warnings, and events across all services into
a single human-readable report with guidance on where to investigate.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-13 21:41:40 -07:00

272 lines
9.0 KiB
JavaScript

const express = require('express');
const fs = require('fs');
const fsp = require('fs').promises;
const path = require('path');
const { exists } = require('../fs-helpers');
const { paginate, parsePaginationParams } = require('../pagination');
module.exports = function(ctx) {
const router = express.Router();
// List containers with logs
// List containers with logs
router.get('/logs/containers', ctx.asyncHandler(async (req, res) => {
  // Trimmed-down view of every container, running or not.
  const raw = await ctx.docker.client.listContainers({ all: true });
  const summaries = raw.map((c) => ({
    id: c.Id.slice(0, 12),
    name: c.Names[0]?.replace(/^\//, '') || 'unknown',
    image: c.Image,
    status: c.State,
    created: c.Created
  }));
  const params = parsePaginationParams(req.query);
  const page = paginate(summaries, params);
  const body = { success: true, containers: page.data };
  if (page.pagination) body.pagination = page.pagination;
  res.json(body);
}, 'logs-containers'));
// Get logs for a specific container
// Get logs for a specific container
//
// Query params:
//   tail       - number of trailing lines to fetch (default 100)
//   since      - only return logs after this unix timestamp (default 0)
//   timestamps - include timestamps unless explicitly 'false'
router.get('/logs/container/:id', ctx.asyncHandler(async (req, res) => {
  const containerId = req.params.id;
  const tail = Number.parseInt(req.query.tail, 10) || 100;
  const since = req.query.since || 0;
  const timestamps = req.query.timestamps !== 'false';
  const container = ctx.docker.client.getContainer(containerId);
  let info;
  try {
    info = await container.inspect();
  } catch (err) {
    if (err.statusCode === 404 || (err.message && err.message.includes('no such container'))) {
      const { NotFoundError } = require('../errors');
      throw new NotFoundError(`Container ${containerId}`);
    }
    throw err;
  }
  const containerName = info.Name.replace(/^\//, '');
  const logs = await container.logs({
    stdout: true, stderr: true,
    tail, since, timestamps
  });
  const buffer = Buffer.isBuffer(logs) ? logs : Buffer.from(logs);
  const lines = [];
  if (info.Config && info.Config.Tty) {
    // TTY containers emit a raw stream with no multiplex framing; the
    // 8-byte-header demux below would mangle it, so split on newlines.
    for (const raw of buffer.toString('utf8').split('\n')) {
      const line = raw.trim();
      if (line) lines.push({ stream: 'stdout', text: line });
    }
  } else {
    // Demultiplex Docker's stdout/stderr framing: each frame is an
    // 8-byte header (stream-type byte, 3 reserved bytes, 4-byte
    // big-endian payload size) followed by the payload.
    let offset = 0;
    while (offset + 8 <= buffer.length) {
      const streamType = buffer[offset];
      const size = buffer.readUInt32BE(offset + 4);
      if (offset + 8 + size > buffer.length) break; // truncated frame
      const line = buffer.slice(offset + 8, offset + 8 + size).toString('utf8').trim();
      if (line) {
        lines.push({
          stream: streamType === 2 ? 'stderr' : 'stdout',
          text: line
        });
      }
      offset += 8 + size;
    }
  }
  res.json({
    success: true,
    containerId, containerName,
    logs: lines,
    count: lines.length
  });
}, 'logs-container'));
// Stream logs (SSE)
// Stream logs (SSE)
router.get('/logs/stream/:id', ctx.asyncHandler(async (req, res) => {
  const containerId = req.params.id;
  const container = ctx.docker.client.getContainer(containerId);
  // Verify the container exists before committing to an SSE response.
  try {
    await container.inspect();
  } catch (err) {
    if (err.statusCode === 404 || (err.message && err.message.includes('no such container'))) {
      const { NotFoundError } = require('../errors');
      throw new NotFoundError(`Container ${containerId}`);
    }
    throw err;
  }
  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');
  res.setHeader('Connection', 'keep-alive');
  res.setHeader('X-Accel-Buffering', 'no');
  // Push headers out immediately so clients see the stream open.
  if (typeof res.flushHeaders === 'function') res.flushHeaders();
  const logStream = await container.logs({
    stdout: true, stderr: true,
    follow: true, tail: 50, timestamps: true
  });
  let closed = false; // set once the client disconnects; guards late writes
  let buffer = Buffer.alloc(0);
  logStream.on('data', (chunk) => {
    if (closed) return;
    buffer = Buffer.concat([buffer, chunk]);
    // Demultiplex complete frames; keep any partial frame for the next chunk.
    while (buffer.length >= 8) {
      const size = buffer.readUInt32BE(4);
      if (buffer.length < 8 + size) break;
      const streamType = buffer[0];
      const line = buffer.slice(8, 8 + size).toString('utf8').trim();
      if (line) {
        const data = JSON.stringify({
          stream: streamType === 2 ? 'stderr' : 'stdout',
          text: line,
          timestamp: new Date().toISOString()
        });
        res.write(`data: ${data}\n\n`);
      }
      buffer = buffer.slice(8 + size);
    }
  });
  // End the response when Docker closes the log stream (e.g. container
  // stopped); previously the SSE connection hung open indefinitely.
  logStream.on('end', () => {
    if (!closed) res.end();
  });
  logStream.on('error', (err) => {
    if (closed) return;
    res.write(`data: ${JSON.stringify({ error: ctx.safeErrorMessage(err) })}\n\n`);
    res.end();
  });
  req.on('close', () => {
    closed = true;
    if (logStream.destroy) logStream.destroy();
  });
}, 'logs-stream'));
// Get latest daily digest
// Get latest daily digest
router.get('/logs/digest/latest', ctx.asyncHandler(async (req, res) => {
  const digest = await ctx.logDigest.getLatestDigest();
  if (digest) {
    res.json({ success: true, digest });
    return;
  }
  // No digest has been produced yet (first run happens at midnight).
  res.json({
    success: true,
    digest: null,
    message: 'No digest available yet. First digest is generated at midnight.'
  });
}, 'logs-digest-latest'));
// Get live digest data (today's accumulated stats)
// Get live digest data (today's accumulated stats)
router.get('/logs/digest/live', ctx.asyncHandler(async (req, res) => {
  const liveStats = ctx.logDigest.getLiveData();
  res.json(Object.assign({ success: true }, liveStats));
}, 'logs-digest-live'));
// List available digest dates
// List available digest dates
router.get('/logs/digest/history', ctx.asyncHandler(async (req, res) => {
  res.json({ success: true, dates: await ctx.logDigest.listDigests() });
}, 'logs-digest-history'));
// Generate digest on demand (for today or a specific date)
// Generate digest on demand (for today or a specific date)
router.post('/logs/digest/generate', ctx.asyncHandler(async (req, res) => {
  // Fall back to today's date. Guard req.body (undefined when no JSON body
  // is sent) and validate the same YYYY-MM-DD format that
  // GET /logs/digest/:date enforces, instead of passing raw input through.
  const date = (req.body && req.body.date) || new Date().toISOString().slice(0, 10);
  if (!/^\d{4}-\d{2}-\d{2}$/.test(date)) {
    return ctx.errorResponse(res, 400, 'Invalid date format. Use YYYY-MM-DD.');
  }
  const digest = await ctx.logDigest.generateDailyDigest(date);
  res.json({ success: true, digest });
}, 'logs-digest-generate'));
// Get digest for a specific date (JSON)
// Get digest for a specific date (JSON)
router.get('/logs/digest/:date', ctx.asyncHandler(async (req, res) => {
  const { date } = req.params;
  // Only well-formed YYYY-MM-DD values are accepted.
  if (!/^\d{4}-\d{2}-\d{2}$/.test(date)) {
    return ctx.errorResponse(res, 400, 'Invalid date format. Use YYYY-MM-DD.');
  }
  // ?format=text returns the plain-text rendering; anything else gets JSON.
  if ((req.query.format || 'json') === 'text') {
    const text = await ctx.logDigest.getDigestText(date);
    if (!text) return ctx.errorResponse(res, 404, `No digest found for ${date}`);
    res.setHeader('Content-Type', 'text/plain');
    return res.send(text);
  }
  const digest = await ctx.logDigest.getDigestByDate(date);
  if (!digest) return ctx.errorResponse(res, 404, `No digest found for ${date}`);
  res.json({ success: true, digest });
}, 'logs-digest-date'));
// Get Docker disk usage snapshot
// Get Docker disk usage snapshot
router.get('/logs/docker-disk', ctx.asyncHandler(async (req, res) => {
  const diskUsage = await ctx.dockerMaintenance.getDiskUsage();
  res.json({
    success: true,
    diskUsage,
    maintenance: ctx.dockerMaintenance.getStatus()
  });
}, 'logs-docker-disk'));
// Trigger Docker maintenance manually
// Trigger Docker maintenance manually
router.post('/logs/docker-maintenance', ctx.asyncHandler(async (req, res) => {
  res.json({ success: true, result: await ctx.dockerMaintenance.runMaintenance() });
}, 'logs-docker-maintenance'));
// Get logs from a file path (for native applications)
// Get logs from a file path (for native applications)
//
// Query params:
//   path - path to the log file; must resolve (after symlinks) inside an
//          allowed log root from platform-paths
//   tail - number of trailing lines to return (default 100)
router.get('/logs/file', ctx.asyncHandler(async (req, res) => {
  const { path: logPath, tail } = req.query;
  if (!logPath) {
    return ctx.errorResponse(res, 400, 'Log path is required');
  }
  // Coerce tail to a sane positive integer. Previously the raw query string
  // was passed to slice(): "0", negatives, and non-numeric values all
  // returned the ENTIRE file (slice(NaN) === slice(0)).
  const parsedTail = Number.parseInt(tail, 10);
  const tailCount = Number.isInteger(parsedTail) && parsedTail > 0 ? parsedTail : 100;
  const platformPaths = require('../platform-paths');
  const allowedPaths = platformPaths.allowedLogPaths;
  const normalizedPath = path.normalize(logPath);
  // Resolve symlinks to prevent symlink-based traversal
  let resolvedPath;
  try {
    resolvedPath = await fsp.realpath(normalizedPath);
  } catch {
    const { NotFoundError } = require('../errors');
    throw new NotFoundError('Log file');
  }
  // Check path against allowed roots with separator boundary
  const isAllowed = allowedPaths.some(allowed => {
    const normalizedAllowed = path.normalize(allowed);
    return resolvedPath === normalizedAllowed || resolvedPath.startsWith(normalizedAllowed + path.sep);
  });
  if (!isAllowed) {
    return ctx.errorResponse(res, 403, 'Access to this log path is not allowed');
  }
  if (!await exists(resolvedPath)) {
    const { NotFoundError } = require('../errors');
    throw new NotFoundError('Log file');
  }
  const fileContent = await fsp.readFile(resolvedPath, 'utf8');
  // Drop blank lines before counting/tailing.
  const lines = fileContent.split('\n').filter(line => line.trim());
  const logs = lines.slice(-tailCount).map(line => ({
    stream: 'stdout',
    text: line,
    timestamp: extractTimestamp(line)
  }));
  res.json({
    success: true,
    logPath: normalizedPath,
    logs,
    count: logs.length,
    totalLines: lines.length
  });
}, 'logs-file'));
return router;
};
/**
 * Pull a leading timestamp out of a log line, if one is present.
 *
 * Recognizes three formats anchored at the start of the line:
 * ISO-ish ("2024-01-02T03:04:05" or with a space), "Mon D, YYYY hh:mm:ss",
 * and a bracketed "[YYYY-MM-DD hh:mm:ss]".
 *
 * @param {string} line - a single log line
 * @returns {?string} the captured timestamp text, or null if none matched
 */
function extractTimestamp(line) {
  const patterns = [
    /^(\d{4}-\d{2}-\d{2}[T\s]\d{2}:\d{2}:\d{2})/,
    /^(\w{3}\s+\d{1,2},\s+\d{4}\s+\d{2}:\d{2}:\d{2})/,
    /^\[(\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}:\d{2})\]/,
  ];
  const hit = patterns
    .map((re) => re.exec(line))
    .find((m) => m !== null);
  return hit ? hit[1] : null;
}