refactor(utils): Extract utilities from server.js

- Create src/utils/http.js - fetchT and HTTP helpers
- Create src/utils/logging.js - Structured logging and error logging
- Create src/utils/responses.js - Standard API responses
- Create src/utils/async-handler.js - Async wrapper with error handling
- Create src/utils/index.js - Consolidated exports

Removes scattered helper functions from server.js
This commit is contained in:
Krystie
2026-03-29 19:40:18 -07:00
parent 173dafa2f3
commit fa7a78388a
5 changed files with 276 additions and 0 deletions

View File

@@ -0,0 +1,30 @@
/**
* Async handler wrapper - Eliminates try/catch boilerplate
*/
const { AppError } = require('../../errors');
/**
 * Wrap an async Express route handler so rejections never go unhandled.
 *
 * @param {Function} logError - async (context, error) error logger.
 * @param {Function} fn - Route handler (req, res, next) to wrap.
 * @param {string} [context] - Label for error logs; falls back to req.path.
 * @returns {Function} Express-compatible middleware.
 */
function asyncHandler(logError, fn, context) {
  // Classify and report a failure raised by the wrapped handler.
  const handleFailure = async (err, req, res, next) => {
    // Typed application errors are delegated to the global error handler.
    if (err instanceof AppError) {
      next(err);
      return;
    }
    await logError(context || req.path, err);
    if (res.headersSent) return;
    // Requires deferred to call time (NOTE(review): presumably to avoid a
    // module load-order cycle — confirm before hoisting).
    const { errorResponse } = require('./responses');
    const { safeErrorMessage } = require('./logging');
    errorResponse(res, 500, safeErrorMessage(err));
  };

  return async (req, res, next) => {
    try {
      await fn(req, res, next);
    } catch (err) {
      await handleFailure(err, req, res, next);
    }
  };
}
module.exports = { asyncHandler };

View File

@@ -0,0 +1,80 @@
/**
* HTTP utilities - Fetch helpers and HTTP operations
*/
const http = require('http');
const { TIMEOUTS } = require('../../constants');
/**
 * Fetch with automatic timeout.
 * Drop-in replacement for fetch() that applies an AbortSignal timeout when
 * the caller did not pass a signal. The caller's options object is never
 * mutated; the non-standard `timeout` key is stripped from a shallow copy.
 *
 * @param {string} url - Target URL.
 * @param {object} [opts] - fetch options; may include a legacy `timeout` key.
 * @param {number} [timeoutMs] - Timeout used when no signal is supplied.
 * @returns {Promise} fetch Response (or the fetch-like result of _httpFetch).
 */
function fetchT(url, opts = {}, timeoutMs = TIMEOUTS.HTTP_DEFAULT) {
  // Caddy admin API rejects Node.js undici fetch - use raw http.request
  if (url.includes(':2019')) {
    return _httpFetch(url, opts, timeoutMs);
  }
  // Shallow-copy and drop `timeout` instead of deleting it from the caller's
  // object (the old code mutated `opts` whenever a signal was supplied).
  const { timeout, ...fetchOpts } = opts;
  if (!fetchOpts.signal) {
    fetchOpts.signal = AbortSignal.timeout(timeoutMs);
  }
  return fetch(url, fetchOpts);
}
/**
 * Raw http.request wrapper for the Caddy admin API.
 *
 * Resolves with a minimal fetch-Response-like object: { ok, status,
 * statusText, json(), text(), headers.get(name) }. Only the subset of the
 * fetch API used by callers is implemented (no streaming, no redirects).
 *
 * @param {string} url - Full URL; port defaults to 2019 when absent.
 * @param {object} [opts] - method, headers, body (assumes body is a string
 *   or Buffer suitable for Buffer.byteLength — TODO confirm callers).
 * @param {number} [timeoutMs] - Socket timeout; the promise rejects when hit.
 * @returns {Promise<object>} fetch-like response object.
 */
function _httpFetch(url, opts = {}, timeoutMs = TIMEOUTS.HTTP_DEFAULT) {
  return new Promise((resolve, reject) => {
    const parsed = new URL(url);
    const options = {
      hostname: parsed.hostname,
      port: parsed.port || 2019, // Caddy admin API default port
      path: parsed.pathname + parsed.search,
      method: (opts.method || 'GET').toUpperCase(),
      headers: { ...opts.headers },
      timeout: timeoutMs,
    };
    if (opts.body) {
      options.headers['Content-Length'] = Buffer.byteLength(opts.body);
    }
    // Cap the buffered response so a misbehaving server cannot exhaust memory.
    const MAX_RESPONSE_SIZE = 10 * 1024 * 1024; // 10MB
    const req = http.request(options, (res) => {
      let data = '';
      let size = 0;
      res.on('data', chunk => {
        size += chunk.length;
        if (size > MAX_RESPONSE_SIZE) {
          // Abort the stream; reject fires before any further events matter.
          res.destroy();
          reject(new Error(`Response from ${url} exceeded ${MAX_RESPONSE_SIZE} bytes`));
          return;
        }
        data += chunk;
      });
      res.on('end', () => {
        // Shape mirrors the subset of the fetch Response API used by callers.
        resolve({
          ok: res.statusCode >= 200 && res.statusCode < 300,
          status: res.statusCode,
          statusText: res.statusMessage,
          json: () => Promise.resolve(JSON.parse(data)),
          text: () => Promise.resolve(data),
          headers: { get: (k) => res.headers[k.toLowerCase()] },
        });
      });
    });
    req.on('timeout', () => {
      // Socket idle timeout: tear down the request and report the deadline.
      req.destroy();
      reject(new Error(`Request to ${url} timed out after ${timeoutMs}ms`));
    });
    req.on('error', reject);
    if (opts.body) req.write(opts.body);
    req.end();
  });
}
module.exports = { fetchT };

View File

@@ -0,0 +1,25 @@
/**
 * Utilities index - Re-export all utility modules so consumers can write a
 * single `require('./utils')` instead of importing each module individually.
 */
const { fetchT } = require('./http');
const { LOG_LEVELS, createLogger, logError, safeErrorMessage } = require('./logging');
const { errorResponse, ok } = require('./responses');
const { asyncHandler } = require('./async-handler');

// Flat namespace: every name is re-exported unchanged from its home module.
module.exports = {
  // HTTP
  fetchT,
  // Logging
  LOG_LEVELS,
  createLogger,
  logError,
  safeErrorMessage,
  // Responses
  errorResponse,
  ok,
  // Async handling
  asyncHandler,
};

View File

@@ -0,0 +1,119 @@
/**
* Logging utilities - Structured logging and error handling
*/
const fsp = require('fs').promises;
const path = require('path');
// Numeric severity ranks; larger means more severe.
const LOG_LEVELS = { debug: 0, info: 1, warn: 2, error: 3 };

/**
 * Create a structured logger that emits one JSON object per line.
 *
 * @param {number} LOG_LEVEL - Minimum numeric level (see LOG_LEVELS) that is emitted.
 * @returns {Function} `log(level, ctx, msg, data)` carrying `.debug/.info/.warn/.error` shortcuts.
 */
function createLogger(LOG_LEVEL) {
  function log(level, context, message, data = {}) {
    // Drop anything below the configured threshold.
    if (LOG_LEVELS[level] < LOG_LEVEL) return;
    const entry = {
      t: new Date().toISOString(),
      level,
      ctx: context,
      msg: message,
    };
    // Attach the payload only when the caller supplied something.
    if (Object.keys(data).length > 0) {
      entry.data = data;
    }
    // Route to the console stream matching the severity (resolved per call).
    let write;
    switch (level) {
      case 'error':
        write = console.error;
        break;
      case 'warn':
        write = console.warn;
        break;
      default:
        write = console.info;
    }
    write(JSON.stringify(entry));
  }
  // Per-level convenience shorthands.
  for (const lvl of ['debug', 'info', 'warn', 'error']) {
    log[lvl] = (ctx, msg, data) => log(lvl, ctx, msg, data);
  }
  return log;
}
/**
 * Enhanced error logging with context tracking.
 *
 * Appends a human-readable entry to ERROR_LOG_FILE, rotating the file to
 * `<file>.1` once it grows past MAX_ERROR_LOG_SIZE bytes. Never throws:
 * write failures are reported through the optional structured logger.
 *
 * @param {string} ERROR_LOG_FILE - Path of the error log file.
 * @param {number} MAX_ERROR_LOG_SIZE - Rotation threshold in bytes.
 * @param {string} context - Short label for where the error occurred.
 * @param {Error|string} error - The error (or plain message) to record.
 * @param {object} [additionalInfo] - Extra data for the entry. If it carries
 *   a `req` (Express request), a compact request summary is logged instead
 *   of serializing the whole request. The caller's object is NOT mutated
 *   (the previous version deleted `req` from it).
 * @param {object} [log] - Optional structured logger for reporting failures.
 */
async function logError(ERROR_LOG_FILE, MAX_ERROR_LOG_SIZE, context, error, additionalInfo = {}, log) {
  const timestamp = new Date().toISOString();
  // Split `req` off a shallow copy so the caller's object is left intact.
  const { req, ...extraInfo } = additionalInfo;
  // Extract request context
  const requestContext = {};
  if (req) {
    requestContext.requestId = req.id;
    requestContext.ip = req.ip || req.socket?.remoteAddress || '';
    requestContext.userAgent = req.get('user-agent');
    requestContext.method = req.method;
    requestContext.path = req.path;
  }
  const contextInfo = Object.keys(requestContext).length > 0
    ? `\nRequest Context: ${JSON.stringify(requestContext, null, 2)}`
    : '';
  // (The old unused `logEntry` object was removed — only logLine is written.)
  const logLine = `[${timestamp}] ${context}: ${error.message || error}\n${error.stack || ''}${contextInfo}\nAdditional Info: ${JSON.stringify(extraInfo, null, 2)}\n${'='.repeat(80)}\n`;
  try {
    // Rotate log if it exceeds max size
    try {
      const stats = await fsp.stat(ERROR_LOG_FILE);
      if (stats.size > MAX_ERROR_LOG_SIZE) {
        const rotated = ERROR_LOG_FILE + '.1';
        const exists = await fsp.access(rotated).then(() => true).catch(() => false);
        if (exists) await fsp.unlink(rotated);
        await fsp.rename(ERROR_LOG_FILE, rotated);
      }
    } catch (_) { /* file may not exist yet */ }
    await fsp.appendFile(ERROR_LOG_FILE, logLine);
  } catch (e) {
    // Last resort: surface the write failure without ever throwing from here.
    if (log && log.error) {
      log.error('errorlog', 'Failed to write to error log', { error: e.message });
    }
  }
}
/**
 * Return a safe, user-facing error message without leaking internals.
 *
 * Port-conflict errors are rewritten into an actionable [DC-200] message;
 * anything that looks like an internal detail (long text, file paths,
 * stack-trace fragments) is replaced with a generic message.
 *
 * @param {Error|string} error - Error object or plain message string.
 * @returns {string} Message safe to return to API clients.
 */
function safeErrorMessage(error) {
  const msg = error.message || error;
  // Detect port-conflict errors, with or without an explicit port number.
  const portMatch = msg.match(/exposing port TCP [^:]*:(\d+)/);
  const isPortConflict =
    portMatch !== null ||
    msg.includes('port is already allocated') ||
    msg.includes('ports are not available');
  if (isPortConflict) {
    const port = portMatch ? portMatch[1] : 'requested';
    return `[DC-200] Port ${port} is already in use. Try a different port or stop the service using that port first.`;
  }
  // Expose only short messages free of paths and stack-trace fragments.
  const looksInternal =
    msg.length >= 200 || msg.includes('/') || msg.includes('\\') || msg.includes(' at ');
  return looksInternal ? 'An internal error occurred' : msg;
}
// Public surface of the logging utilities.
module.exports = {
  LOG_LEVELS,
  createLogger,
  logError,
  safeErrorMessage,
};

View File

@@ -0,0 +1,22 @@
/**
* Response helpers - Standard API response formats
*/
/**
 * Standard error response.
 *
 * @param {object} res - Express response object.
 * @param {number} statusCode - HTTP status code to send.
 * @param {string} message - User-facing error text.
 * @param {object} [extras] - Extra fields merged into the payload.
 * @returns The result of res.json() (the response, for chaining).
 */
function errorResponse(res, statusCode, message, extras = {}) {
  const payload = { success: false, error: message, ...extras };
  return res.status(statusCode).json(payload);
}
/**
 * Standard success response.
 *
 * @param {object} res - Express response object.
 * @param {object} [data] - Extra fields merged into the payload.
 * @returns The result of res.json() (the response, for chaining).
 */
function ok(res, data = {}) {
  const payload = { success: true, ...data };
  return res.json(payload);
}
// Public surface of the response helpers.
module.exports = {
  errorResponse,
  ok,
};