289 lines
9.7 KiB
JavaScript
289 lines
9.7 KiB
JavaScript
const express = require('express');
const fs = require('fs');
const fsp = require('fs').promises;
const path = require('path');
const { exists } = require('../fs-helpers');
const { paginate, parsePaginationParams } = require('../pagination');
const { NotFoundError, ValidationError, ForbiddenError } = require('../errors');
|
|
|
|
/**
|
|
* Logs route factory
|
|
* @param {Object} deps - Explicit dependencies
|
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
|
* @param {Object} deps.docker - Docker client
|
|
* @param {Object} deps.logDigest - Log digest manager (optional)
|
|
* @param {Object} deps.dockerMaintenance - Docker maintenance module (optional)
|
|
* @returns {express.Router}
|
|
*/
|
|
module.exports = function({ asyncHandler, docker, logDigest, dockerMaintenance }) {
|
|
const router = express.Router();
|
|
|
|
// List containers with logs
|
|
router.get('/logs/containers', asyncHandler(async (req, res) => {
  // Summarize every container (running or stopped) for the log browser.
  const allContainers = await docker.client.listContainers({ all: true });
  const summaries = allContainers.map((c) => ({
    id: c.Id.slice(0, 12),
    name: c.Names[0]?.replace(/^\//, '') || 'unknown',
    image: c.Image,
    status: c.State,
    created: c.Created
  }));

  // Apply standard pagination; the pagination object is only included
  // in the response when the helper produced one.
  const params = parsePaginationParams(req.query);
  const page = paginate(summaries, params);
  const payload = { success: true, containers: page.data };
  if (page.pagination) payload.pagination = page.pagination;
  res.json(payload);
}, 'logs-containers'));
|
|
|
|
// Get logs for a specific container
|
|
// Get logs for a specific container: fetch a bounded slice of logs and
// demultiplex Docker's multiplexed stream format into { stream, text } rows.
router.get('/logs/container/:id', asyncHandler(async (req, res) => {
  const containerId = req.params.id;
  // Query values are strings; parse with an explicit radix. NaN (and an
  // explicit 0) fall back to the default of 100 lines.
  const tail = Number.parseInt(req.query.tail, 10) || 100;
  const since = req.query.since || 0;
  const timestamps = req.query.timestamps !== 'false';

  const container = docker.client.getContainer(containerId);
  let info;
  try {
    info = await container.inspect();
  } catch (err) {
    // Dockerode surfaces a missing container as HTTP 404; map it to our
    // NotFoundError (imported at top of file) so the client gets a 404.
    if (err.statusCode === 404 || (err.message && err.message.includes('no such container'))) {
      throw new NotFoundError(`Container ${containerId}`);
    }
    throw err;
  }
  const containerName = info.Name.replace(/^\//, '');

  const logs = await container.logs({
    stdout: true, stderr: true,
    tail, since, timestamps
  });

  // Parse the multiplexed log stream. Each frame: 8-byte header where
  // byte 0 is the stream type (2 = stderr) and bytes 4-7 are the
  // big-endian payload size, followed by the payload. A truncated
  // trailing frame ends parsing cleanly.
  const buffer = Buffer.isBuffer(logs) ? logs : Buffer.from(logs);
  const lines = [];
  let offset = 0;
  while (offset + 8 <= buffer.length) {
    const streamType = buffer[offset];
    const size = buffer.readUInt32BE(offset + 4);
    if (offset + 8 + size > buffer.length) break;

    const text = buffer.slice(offset + 8, offset + 8 + size).toString('utf8').trim();
    if (text) {
      lines.push({
        stream: streamType === 2 ? 'stderr' : 'stdout',
        text
      });
    }
    offset += 8 + size;
  }

  res.json({
    success: true,
    containerId, containerName,
    logs: lines,
    count: lines.length
  });
}, 'logs-container'));
|
|
|
|
// Stream logs (SSE)
|
|
// Stream logs (SSE): follow a container's logs and push each complete
// line to the client as a server-sent event.
router.get('/logs/stream/:id', asyncHandler(async (req, res) => {
  const containerId = req.params.id;
  const container = docker.client.getContainer(containerId);

  // Verify the container exists before committing to an SSE response,
  // so a bad id still gets a normal 404 JSON error.
  try {
    await container.inspect();
  } catch (err) {
    if (err.statusCode === 404 || (err.message && err.message.includes('no such container'))) {
      throw new NotFoundError(`Container ${containerId}`);
    }
    throw err;
  }

  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');
  res.setHeader('Connection', 'keep-alive');
  res.setHeader('X-Accel-Buffering', 'no'); // disable proxy (nginx) buffering

  const logStream = await container.logs({
    stdout: true, stderr: true,
    follow: true, tail: 50, timestamps: true
  });

  // Accumulate chunks and emit only complete multiplexed frames
  // (8-byte header: byte 0 = stream type, bytes 4-7 = payload size).
  let buffer = Buffer.alloc(0);

  logStream.on('data', (chunk) => {
    buffer = Buffer.concat([buffer, chunk]);

    while (buffer.length >= 8) {
      const size = buffer.readUInt32BE(4);
      if (buffer.length < 8 + size) break; // wait for the rest of the frame

      const streamType = buffer[0];
      const line = buffer.slice(8, 8 + size).toString('utf8').trim();

      // Guard against writing after the response has ended (client gone).
      if (line && !res.writableEnded) {
        const data = JSON.stringify({
          stream: streamType === 2 ? 'stderr' : 'stdout',
          text: line,
          timestamp: new Date().toISOString()
        });
        res.write(`data: ${data}\n\n`);
      }

      buffer = buffer.slice(8 + size);
    }
  });

  logStream.on('error', (err) => {
    // Report stream failures in-band, then close — but never write to a
    // response that has already ended.
    if (!res.writableEnded) {
      res.write(`data: ${JSON.stringify({ error: err.message || String(err) })}\n\n`);
      res.end();
    }
  });

  // Tear down the follow stream when the client disconnects so we don't
  // leak open Docker log streams.
  req.on('close', () => {
    if (logStream.destroy) logStream.destroy();
  });
}, 'logs-stream'));
|
|
|
|
// Get latest daily digest
|
|
router.get('/logs/digest/latest', asyncHandler(async (req, res) => {
  // Most recent persisted daily digest, or a friendly placeholder when
  // none has been generated yet.
  if (!logDigest) throw new Error('Log digest not available');
  const digest = await logDigest.getLatestDigest();
  if (digest) {
    res.json({ success: true, digest });
  } else {
    res.json({ success: true, digest: null, message: 'No digest available yet. First digest is generated at midnight.' });
  }
}, 'logs-digest-latest'));
|
|
|
|
// Get live digest data (today's accumulated stats)
|
|
router.get('/logs/digest/live', asyncHandler(async (req, res) => {
  // Today's accumulated (not yet persisted) digest stats, merged into
  // the response envelope.
  if (!logDigest) throw new Error('Log digest not available');
  const liveData = logDigest.getLiveData();
  res.json(Object.assign({ success: true }, liveData));
}, 'logs-digest-live'));
|
|
|
|
// List available digest dates
|
|
router.get('/logs/digest/history', asyncHandler(async (req, res) => {
  // List the dates for which a stored digest exists.
  if (!logDigest) throw new Error('Log digest not available');
  res.json({ success: true, dates: await logDigest.listDigests() });
}, 'logs-digest-history'));
|
|
|
|
// Generate digest on demand (for today or a specific date)
|
|
// Generate a digest on demand, for today (default) or a specific date.
router.post('/logs/digest/generate', asyncHandler(async (req, res) => {
  if (!logDigest) throw new Error('Log digest not available');
  // Optional chaining: req.body is undefined when no JSON body was sent
  // (or no body parser ran); bare `req.body.date` would throw TypeError.
  const date = req.body?.date || new Date().toISOString().slice(0, 10);
  // Same validation as GET /logs/digest/:date, for consistency.
  if (!/^\d{4}-\d{2}-\d{2}$/.test(date)) {
    throw new ValidationError('Invalid date format. Use YYYY-MM-DD.');
  }
  const digest = await logDigest.generateDailyDigest(date);
  res.json({ success: true, digest });
}, 'logs-digest-generate'));
|
|
|
|
// Get digest for a specific date (JSON)
|
|
// Get the stored digest for a specific date, as JSON (default) or plain
// text (?format=text). Note: ValidationError/NotFoundError come from the
// top-of-file '../errors' import — previously ValidationError was never
// imported, so a malformed date raised ReferenceError (500) instead of 400.
router.get('/logs/digest/:date', asyncHandler(async (req, res) => {
  if (!logDigest) throw new Error('Log digest not available');
  const { date } = req.params;
  if (!/^\d{4}-\d{2}-\d{2}$/.test(date)) {
    throw new ValidationError('Invalid date format. Use YYYY-MM-DD.');
  }
  const format = req.query.format || 'json';
  if (format === 'text') {
    const text = await logDigest.getDigestText(date);
    if (!text) throw new NotFoundError(`Digest for ${date}`);
    res.setHeader('Content-Type', 'text/plain');
    return res.send(text);
  }
  const digest = await logDigest.getDigestByDate(date);
  if (!digest) throw new NotFoundError(`Digest for ${date}`);
  res.json({ success: true, digest });
}, 'logs-digest-date'));
|
|
|
|
// Get Docker disk usage snapshot
|
|
router.get('/logs/docker-disk', asyncHandler(async (req, res) => {
  // Docker disk usage snapshot plus the maintenance module's status.
  if (!dockerMaintenance) throw new Error('Docker maintenance not available');
  const diskUsage = await dockerMaintenance.getDiskUsage();
  res.json({ success: true, diskUsage, maintenance: dockerMaintenance.getStatus() });
}, 'logs-docker-disk'));
|
|
|
|
// Trigger Docker maintenance manually
|
|
router.post('/logs/docker-maintenance', asyncHandler(async (req, res) => {
  // Kick off a Docker maintenance run immediately and report its result.
  if (!dockerMaintenance) throw new Error('Docker maintenance not available');
  res.json({ success: true, result: await dockerMaintenance.runMaintenance() });
}, 'logs-docker-maintenance'));
|
|
|
|
// Get logs from a file path (for native applications)
|
|
// Get logs from a file path (for native, non-Docker applications).
// Access is restricted to an allowlist of log roots; symlinks are
// resolved first so a link inside an allowed root can't escape it.
router.get('/logs/file', asyncHandler(async (req, res) => {
  const { path: logPath } = req.query;
  // Query values are strings: parse with a radix. Previously the raw
  // string was passed to slice(-tail); a non-numeric value became NaN
  // and slice(NaN) returned the WHOLE file instead of a bounded tail.
  const tail = Number.parseInt(req.query.tail, 10) || 100;

  if (!logPath) {
    throw new ValidationError('Log path is required');
  }

  const platformPaths = require('../platform-paths');
  const allowedPaths = platformPaths.allowedLogPaths;

  const normalizedPath = path.normalize(logPath);

  // Resolve symlinks to prevent symlink-based traversal. realpath also
  // fails when the file does not exist, which maps naturally to 404.
  let resolvedPath;
  try {
    resolvedPath = await fsp.realpath(normalizedPath);
  } catch {
    throw new NotFoundError('Log file');
  }

  // The resolved path must equal an allowed root or live beneath one;
  // the path.sep boundary prevents "/var/logx" matching root "/var/log".
  const isAllowed = allowedPaths.some((allowed) => {
    const normalizedAllowed = path.normalize(allowed);
    return resolvedPath === normalizedAllowed || resolvedPath.startsWith(normalizedAllowed + path.sep);
  });

  if (!isAllowed) {
    throw new ForbiddenError('Access to this log path is not allowed');
  }

  if (!await exists(resolvedPath)) {
    throw new NotFoundError('Log file');
  }

  const fileContent = await fsp.readFile(resolvedPath, 'utf8');
  const lines = fileContent.split('\n').filter((line) => line.trim());
  const tailLines = lines.slice(-tail);

  const logs = tailLines.map((line) => ({
    stream: 'stdout',
    text: line,
    timestamp: extractTimestamp(line)
  }));

  res.json({
    success: true,
    logPath: normalizedPath,
    logs,
    count: logs.length,
    totalLines: lines.length
  });
}, 'logs-file'));
|
|
|
|
return router;
|
|
};
|
|
|
|
/**
 * Best-effort extraction of a leading timestamp from a raw log line.
 * Recognizes "YYYY-MM-DD HH:MM:SS" / "YYYY-MM-DDTHH:MM:SS",
 * "Mon D, YYYY HH:MM:SS", and bracketed "[YYYY-MM-DD HH:MM:SS]" prefixes.
 * @param {string} line - Raw log line
 * @returns {string|null} Matched timestamp text, or null when no prefix matches
 */
function extractTimestamp(line) {
  for (const pattern of [
    /^(\d{4}-\d{2}-\d{2}[T\s]\d{2}:\d{2}:\d{2})/,
    /^(\w{3}\s+\d{1,2},\s+\d{4}\s+\d{2}:\d{2}:\d{2})/,
    /^\[(\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}:\d{2})\]/,
  ]) {
    const hit = pattern.exec(line);
    if (hit !== null) return hit[1];
  }
  return null;
}
|