// Log-retrieval routes for the API server: container listing, per-container
// log reads, SSE log streaming, and whitelisted native log-file access.
// (Part of a larger codebase: API server modules/routes, dashboard frontend,
// DashCA certificate distribution, installer script, deployment skills.)
const express = require('express');
|
|
const fs = require('fs');
|
|
const fsp = require('fs').promises;
|
|
const path = require('path');
|
|
const { exists } = require('../fs-helpers');
|
|
const { paginate, parsePaginationParams } = require('../pagination');
|
|
|
|
module.exports = function(ctx) {
|
|
const router = express.Router();
|
|
|
|
// List containers with logs
|
|
router.get('/logs/containers', ctx.asyncHandler(async (req, res) => {
|
|
const containers = await ctx.docker.client.listContainers({ all: true });
|
|
const containerList = containers.map(c => ({
|
|
id: c.Id.slice(0, 12),
|
|
name: c.Names[0]?.replace(/^\//, '') || 'unknown',
|
|
image: c.Image,
|
|
status: c.State,
|
|
created: c.Created
|
|
}));
|
|
|
|
const paginationParams = parsePaginationParams(req.query);
|
|
const result = paginate(containerList, paginationParams);
|
|
res.json({ success: true, containers: result.data, ...(result.pagination && { pagination: result.pagination }) });
|
|
}, 'logs-containers'));
|
|
|
|
// Get logs for a specific container
|
|
router.get('/logs/container/:id', ctx.asyncHandler(async (req, res) => {
|
|
const containerId = req.params.id;
|
|
const tail = parseInt(req.query.tail) || 100;
|
|
const since = req.query.since || 0;
|
|
const timestamps = req.query.timestamps !== 'false';
|
|
|
|
const container = ctx.docker.client.getContainer(containerId);
|
|
const info = await container.inspect();
|
|
const containerName = info.Name.replace(/^\//, '');
|
|
|
|
const logs = await container.logs({
|
|
stdout: true, stderr: true,
|
|
tail, since, timestamps
|
|
});
|
|
|
|
// Parse Docker log stream (demultiplex stdout/stderr)
|
|
const lines = [];
|
|
let offset = 0;
|
|
const buffer = Buffer.isBuffer(logs) ? logs : Buffer.from(logs);
|
|
|
|
while (offset < buffer.length) {
|
|
if (offset + 8 > buffer.length) break;
|
|
const header = buffer.slice(offset, offset + 8);
|
|
const streamType = header[0];
|
|
const size = header.readUInt32BE(4);
|
|
if (offset + 8 + size > buffer.length) break;
|
|
|
|
const line = buffer.slice(offset + 8, offset + 8 + size).toString('utf8').trim();
|
|
if (line) {
|
|
lines.push({
|
|
stream: streamType === 2 ? 'stderr' : 'stdout',
|
|
text: line
|
|
});
|
|
}
|
|
offset += 8 + size;
|
|
}
|
|
|
|
res.json({
|
|
success: true,
|
|
containerId, containerName,
|
|
logs: lines,
|
|
count: lines.length
|
|
});
|
|
}, 'logs-container'));
|
|
|
|
// Stream logs (SSE)
|
|
router.get('/logs/stream/:id', ctx.asyncHandler(async (req, res) => {
|
|
const containerId = req.params.id;
|
|
const container = ctx.docker.client.getContainer(containerId);
|
|
|
|
res.setHeader('Content-Type', 'text/event-stream');
|
|
res.setHeader('Cache-Control', 'no-cache');
|
|
res.setHeader('Connection', 'keep-alive');
|
|
res.setHeader('X-Accel-Buffering', 'no');
|
|
|
|
const logStream = await container.logs({
|
|
stdout: true, stderr: true,
|
|
follow: true, tail: 50, timestamps: true
|
|
});
|
|
|
|
let buffer = Buffer.alloc(0);
|
|
|
|
logStream.on('data', (chunk) => {
|
|
buffer = Buffer.concat([buffer, chunk]);
|
|
|
|
while (buffer.length >= 8) {
|
|
const size = buffer.readUInt32BE(4);
|
|
if (buffer.length < 8 + size) break;
|
|
|
|
const streamType = buffer[0];
|
|
const line = buffer.slice(8, 8 + size).toString('utf8').trim();
|
|
|
|
if (line) {
|
|
const data = JSON.stringify({
|
|
stream: streamType === 2 ? 'stderr' : 'stdout',
|
|
text: line,
|
|
timestamp: new Date().toISOString()
|
|
});
|
|
res.write(`data: ${data}\n\n`);
|
|
}
|
|
|
|
buffer = buffer.slice(8 + size);
|
|
}
|
|
});
|
|
|
|
logStream.on('error', (err) => {
|
|
res.write(`data: ${JSON.stringify({ error: ctx.safeErrorMessage(err) })}\n\n`);
|
|
res.end();
|
|
});
|
|
|
|
req.on('close', () => {
|
|
if (logStream.destroy) logStream.destroy();
|
|
});
|
|
}, 'logs-stream'));
|
|
|
|
// Get logs from a file path (for native applications)
|
|
router.get('/logs/file', ctx.asyncHandler(async (req, res) => {
|
|
const { path: logPath, tail = 100 } = req.query;
|
|
|
|
if (!logPath) {
|
|
return ctx.errorResponse(res, 400, 'Log path is required');
|
|
}
|
|
|
|
const platformPaths = require('../platform-paths');
|
|
const allowedPaths = platformPaths.allowedLogPaths;
|
|
|
|
const normalizedPath = path.normalize(logPath);
|
|
const isAllowed = allowedPaths.some(allowed =>
|
|
normalizedPath.startsWith(path.normalize(allowed))
|
|
);
|
|
|
|
if (!isAllowed) {
|
|
return ctx.errorResponse(res, 403, 'Access to this log path is not allowed');
|
|
}
|
|
|
|
if (!await exists(normalizedPath)) {
|
|
const { NotFoundError } = require('../errors');
|
|
throw new NotFoundError('Log file');
|
|
}
|
|
|
|
const fileContent = await fsp.readFile(normalizedPath, 'utf8');
|
|
const lines = fileContent.split('\n').filter(line => line.trim());
|
|
const tailLines = lines.slice(-tail);
|
|
|
|
const logs = tailLines.map(line => ({
|
|
stream: 'stdout',
|
|
text: line,
|
|
timestamp: extractTimestamp(line)
|
|
}));
|
|
|
|
res.json({
|
|
success: true,
|
|
logPath: normalizedPath,
|
|
logs,
|
|
count: logs.length,
|
|
totalLines: lines.length
|
|
});
|
|
}, 'logs-file'));
|
|
|
|
return router;
|
|
};
|
|
|
|
function extractTimestamp(line) {
|
|
const patterns = [
|
|
/^(\d{4}-\d{2}-\d{2}[T\s]\d{2}:\d{2}:\d{2})/,
|
|
/^(\w{3}\s+\d{1,2},\s+\d{4}\s+\d{2}:\d{2}:\d{2})/,
|
|
/^\[(\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}:\d{2})\]/,
|
|
];
|
|
|
|
for (const pattern of patterns) {
|
|
const match = line.match(pattern);
|
|
if (match) return match[1];
|
|
}
|
|
return null;
|
|
}
|