Merge Phase 2.1: Server.js refactor (88% reduction, modular structure)
This commit is contained in:
5
dashcaddy-api/.eslintignore
Normal file
5
dashcaddy-api/.eslintignore
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
node_modules/
|
||||||
|
coverage/
|
||||||
|
dist/
|
||||||
|
build/
|
||||||
|
*.min.js
|
||||||
57
dashcaddy-api/.eslintrc.js
Normal file
57
dashcaddy-api/.eslintrc.js
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
module.exports = {
|
||||||
|
env: {
|
||||||
|
node: true,
|
||||||
|
es2021: true,
|
||||||
|
},
|
||||||
|
extends: 'eslint:recommended',
|
||||||
|
parserOptions: {
|
||||||
|
ecmaVersion: 'latest',
|
||||||
|
sourceType: 'commonjs',
|
||||||
|
},
|
||||||
|
rules: {
|
||||||
|
// Error Prevention
|
||||||
|
'no-unused-vars': ['warn', { argsIgnorePattern: '^_', varsIgnorePattern: '^_' }],
|
||||||
|
'no-console': 'off', // We use structured logging, but console is okay for debug
|
||||||
|
'no-undef': 'error',
|
||||||
|
'no-unreachable': 'error',
|
||||||
|
'no-constant-condition': ['error', { checkLoops: false }],
|
||||||
|
|
||||||
|
// Code Quality
|
||||||
|
'prefer-const': 'warn',
|
||||||
|
'no-var': 'warn',
|
||||||
|
'eqeqeq': ['warn', 'always', { null: 'ignore' }],
|
||||||
|
'curly': ['warn', 'multi-line'],
|
||||||
|
'no-throw-literal': 'error',
|
||||||
|
|
||||||
|
// Async/Await
|
||||||
|
'require-await': 'warn',
|
||||||
|
'no-async-promise-executor': 'error',
|
||||||
|
'no-await-in-loop': 'off', // Sometimes intentional for sequential operations
|
||||||
|
|
||||||
|
// Style (Prettier handles formatting, these are semantic)
|
||||||
|
'consistent-return': 'off', // Express routes don't always return
|
||||||
|
'no-nested-ternary': 'warn',
|
||||||
|
'max-depth': ['warn', 4],
|
||||||
|
'complexity': ['warn', 20],
|
||||||
|
|
||||||
|
// Prevent common pitfalls
|
||||||
|
'no-eval': 'error',
|
||||||
|
'no-implied-eval': 'error',
|
||||||
|
'no-new-func': 'error',
|
||||||
|
'no-with': 'error',
|
||||||
|
'no-proto': 'error',
|
||||||
|
},
|
||||||
|
overrides: [
|
||||||
|
{
|
||||||
|
// Test files can be more lenient
|
||||||
|
files: ['**/__tests__/**/*.js', '**/*.test.js', '**/*.spec.js'],
|
||||||
|
env: {
|
||||||
|
jest: true,
|
||||||
|
},
|
||||||
|
rules: {
|
||||||
|
'no-unused-expressions': 'off',
|
||||||
|
'max-depth': 'off',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
6
dashcaddy-api/.prettierignore
Normal file
6
dashcaddy-api/.prettierignore
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
node_modules/
|
||||||
|
coverage/
|
||||||
|
dist/
|
||||||
|
build/
|
||||||
|
package-lock.json
|
||||||
|
*.min.js
|
||||||
10
dashcaddy-api/.prettierrc
Normal file
10
dashcaddy-api/.prettierrc
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"semi": true,
|
||||||
|
"singleQuote": true,
|
||||||
|
"trailingComma": "es5",
|
||||||
|
"printWidth": 120,
|
||||||
|
"tabWidth": 2,
|
||||||
|
"useTabs": false,
|
||||||
|
"arrowParens": "avoid",
|
||||||
|
"endOfLine": "lf"
|
||||||
|
}
|
||||||
@@ -9,6 +9,7 @@ COPY package*.json ./
|
|||||||
RUN npm install --production
|
RUN npm install --production
|
||||||
|
|
||||||
COPY *.js ./
|
COPY *.js ./
|
||||||
|
COPY src/ ./src/
|
||||||
COPY routes/ ./routes/
|
COPY routes/ ./routes/
|
||||||
COPY openapi.yaml ./
|
COPY openapi.yaml ./
|
||||||
|
|
||||||
|
|||||||
4
dashcaddy-api/package-lock.json
generated
4
dashcaddy-api/package-lock.json
generated
@@ -1,12 +1,12 @@
|
|||||||
{
|
{
|
||||||
"name": "dashcaddy-api",
|
"name": "dashcaddy-api",
|
||||||
"version": "1.1.0",
|
"version": "1.1.5",
|
||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "dashcaddy-api",
|
"name": "dashcaddy-api",
|
||||||
"version": "1.1.0",
|
"version": "1.1.5",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"compression": "^1.8.1",
|
"compression": "^1.8.1",
|
||||||
"cors": "^2.8.6",
|
"cors": "^2.8.6",
|
||||||
|
|||||||
@@ -6,30 +6,57 @@ const { REGEX, DOCKER } = require('../../constants');
|
|||||||
const { isValidPort } = require('../../input-validator');
|
const { isValidPort } = require('../../input-validator');
|
||||||
const { exists } = require('../../fs-helpers');
|
const { exists } = require('../../fs-helpers');
|
||||||
const platformPaths = require('../../platform-paths');
|
const platformPaths = require('../../platform-paths');
|
||||||
const { ValidationError } = require('../errors');
|
const { ValidationError } = require('../../errors');
|
||||||
|
const { logError } = require('../../src/utils/logging');
|
||||||
|
/**
|
||||||
|
* Apps deployment routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.docker - Docker client wrapper
|
||||||
|
* @param {Object} deps.caddy - Caddy client
|
||||||
|
* @param {Object} deps.credentialManager - Credential manager
|
||||||
|
* @param {Object} deps.servicesStateManager - Services state manager
|
||||||
|
* @param {Object} deps.portLockManager - Port lock manager
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Function} deps.errorResponse - Error response helper
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @param {Object} deps.helpers - Apps helpers module
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
|
||||||
module.exports = function(ctx, helpers) {
|
module.exports = function({ docker, caddy, credentialManager, servicesStateManager, portLockManager, asyncHandler, errorResponse, log, helpers, APP_TEMPLATES, siteConfig, buildDomain, buildServiceUrl, addServiceToConfig, dns, notification, safeErrorMessage }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
|
// Ctx shim for backward compatibility with existing route code
|
||||||
|
const ctx = {
|
||||||
|
APP_TEMPLATES,
|
||||||
|
siteConfig,
|
||||||
|
buildDomain,
|
||||||
|
buildServiceUrl,
|
||||||
|
addServiceToConfig,
|
||||||
|
dns,
|
||||||
|
notification,
|
||||||
|
safeErrorMessage
|
||||||
|
};
|
||||||
|
|
||||||
async function deployDashCAStaticSite(template, userConfig) {
|
async function deployDashCAStaticSite(template, userConfig) {
|
||||||
const destPath = platformPaths.caCertDir;
|
const destPath = platformPaths.caCertDir;
|
||||||
try {
|
try {
|
||||||
ctx.log.info('deploy', 'DashCA: Starting static site deployment');
|
log.info('deploy', 'DashCA: Starting static site deployment');
|
||||||
if (!await exists(destPath)) {
|
if (!await exists(destPath)) {
|
||||||
await fsp.mkdir(destPath, { recursive: true });
|
await fsp.mkdir(destPath, { recursive: true });
|
||||||
ctx.log.info('deploy', 'DashCA: Created destination directory', { path: destPath });
|
log.info('deploy', 'DashCA: Created destination directory', { path: destPath });
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx.log.info('deploy', 'DashCA: Verifying certificate files');
|
log.info('deploy', 'DashCA: Verifying certificate files');
|
||||||
const rootCertExists = await exists(`${destPath}/root.crt`);
|
const rootCertExists = await exists(`${destPath}/root.crt`);
|
||||||
const intermediateCertExists = await exists(`${destPath}/intermediate.crt`);
|
const intermediateCertExists = await exists(`${destPath}/intermediate.crt`);
|
||||||
if (rootCertExists) ctx.log.info('deploy', 'DashCA: Root certificate found');
|
if (rootCertExists) log.info('deploy', 'DashCA: Root certificate found');
|
||||||
else ctx.log.warn('deploy', 'DashCA: Root certificate not found', { expected: path.join(destPath, 'root.crt') });
|
else log.warn('deploy', 'DashCA: Root certificate not found', { expected: path.join(destPath, 'root.crt') });
|
||||||
if (intermediateCertExists) ctx.log.info('deploy', 'DashCA: Intermediate certificate found');
|
if (intermediateCertExists) log.info('deploy', 'DashCA: Intermediate certificate found');
|
||||||
|
|
||||||
const indexPath = path.join(destPath, 'index.html');
|
const indexPath = path.join(destPath, 'index.html');
|
||||||
if (!await exists(indexPath)) {
|
if (!await exists(indexPath)) {
|
||||||
ctx.log.info('deploy', 'DashCA: Creating minimal landing page');
|
log.info('deploy', 'DashCA: Creating minimal landing page');
|
||||||
const minimalHtml = `<!DOCTYPE html>
|
const minimalHtml = `<!DOCTYPE html>
|
||||||
<html lang="en">
|
<html lang="en">
|
||||||
<head>
|
<head>
|
||||||
@@ -58,15 +85,15 @@ module.exports = function(ctx, helpers) {
|
|||||||
</body>
|
</body>
|
||||||
</html>`;
|
</html>`;
|
||||||
await fsp.writeFile(indexPath, minimalHtml);
|
await fsp.writeFile(indexPath, minimalHtml);
|
||||||
ctx.log.info('deploy', 'DashCA: Created minimal landing page');
|
log.info('deploy', 'DashCA: Created minimal landing page');
|
||||||
} else {
|
} else {
|
||||||
ctx.log.info('deploy', 'DashCA: Using existing index.html');
|
log.info('deploy', 'DashCA: Using existing index.html');
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx.log.info('deploy', 'DashCA: For full features, copy certificate files to ' + destPath);
|
log.info('deploy', 'DashCA: For full features, copy certificate files to ' + destPath);
|
||||||
ctx.log.info('deploy', 'DashCA: Static site deployment completed successfully');
|
log.info('deploy', 'DashCA: Static site deployment completed successfully');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
ctx.log.error('deploy', 'DashCA deployment error', { error: error.message });
|
log.error('deploy', 'DashCA deployment error', { error: error.message });
|
||||||
throw new Error(`DashCA deployment failed: ${error.message}`);
|
throw new Error(`DashCA deployment failed: ${error.message}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -82,9 +109,9 @@ module.exports = function(ctx, helpers) {
|
|||||||
|
|
||||||
let lockId = null;
|
let lockId = null;
|
||||||
try {
|
try {
|
||||||
ctx.log.info('deploy', 'Acquiring port locks', { ports: requestedPorts });
|
log.info('deploy', 'Acquiring port locks', { ports: requestedPorts });
|
||||||
lockId = await ctx.portLockManager.acquirePorts(requestedPorts);
|
lockId = await portLockManager.acquirePorts(requestedPorts);
|
||||||
ctx.log.info('deploy', 'Port locks acquired', { lockId });
|
log.info('deploy', 'Port locks acquired', { lockId });
|
||||||
} catch (lockError) {
|
} catch (lockError) {
|
||||||
throw new Error(`Failed to acquire port locks: ${lockError.message}`);
|
throw new Error(`Failed to acquire port locks: ${lockError.message}`);
|
||||||
}
|
}
|
||||||
@@ -92,9 +119,9 @@ module.exports = function(ctx, helpers) {
|
|||||||
try {
|
try {
|
||||||
// Remove stale container with same name
|
// Remove stale container with same name
|
||||||
try {
|
try {
|
||||||
const existingContainer = ctx.docker.client.getContainer(containerName);
|
const existingContainer = docker.client.getContainer(containerName);
|
||||||
const info = await existingContainer.inspect();
|
const info = await existingContainer.inspect();
|
||||||
ctx.log.info('docker', 'Removing stale container', { containerName, status: info.State.Status });
|
log.info('docker', 'Removing stale container', { containerName, status: info.State.Status });
|
||||||
await existingContainer.remove({ force: true });
|
await existingContainer.remove({ force: true });
|
||||||
await new Promise(r => setTimeout(r, 2000));
|
await new Promise(r => setTimeout(r, 2000));
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
@@ -144,43 +171,43 @@ module.exports = function(ctx, helpers) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
ctx.log.info('docker', 'Pulling image', { image: processedTemplate.docker.image });
|
log.info('docker', 'Pulling image', { image: processedTemplate.docker.image });
|
||||||
await ctx.docker.pull(processedTemplate.docker.image);
|
await docker.pull(processedTemplate.docker.image);
|
||||||
ctx.log.info('docker', 'Image pulled successfully', { image: processedTemplate.docker.image });
|
log.info('docker', 'Image pulled successfully', { image: processedTemplate.docker.image });
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('docker', 'Image pull failed, checking if local image exists', { image: processedTemplate.docker.image, error: e.message });
|
log.warn('docker', 'Image pull failed, checking if local image exists', { image: processedTemplate.docker.image, error: e.message });
|
||||||
try {
|
try {
|
||||||
const images = await ctx.docker.client.listImages({ filters: { reference: [processedTemplate.docker.image] } });
|
const images = await docker.client.listImages({ filters: { reference: [processedTemplate.docker.image] } });
|
||||||
if (images.length === 0) throw new Error(`[DC-201] Image ${processedTemplate.docker.image} not found locally and pull failed: ${e.message}`);
|
if (images.length === 0) throw new Error(`[DC-201] Image ${processedTemplate.docker.image} not found locally and pull failed: ${e.message}`);
|
||||||
ctx.log.info('docker', 'Using existing local image', { image: processedTemplate.docker.image });
|
log.info('docker', 'Using existing local image', { image: processedTemplate.docker.image });
|
||||||
} catch (listError) {
|
} catch (listError) {
|
||||||
throw new Error(`[DC-201] Failed to pull or find image ${processedTemplate.docker.image}: ${e.message}`);
|
throw new Error(`[DC-201] Failed to pull or find image ${processedTemplate.docker.image}: ${e.message}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const container = await ctx.docker.client.createContainer(containerConfig);
|
const container = await docker.client.createContainer(containerConfig);
|
||||||
await container.start();
|
await container.start();
|
||||||
|
|
||||||
// Prune dangling images to prevent disk bloat
|
// Prune dangling images to prevent disk bloat
|
||||||
try {
|
try {
|
||||||
const pruneResult = await ctx.docker.client.pruneImages({ filters: { dangling: { true: true } } });
|
const pruneResult = await docker.client.pruneImages({ filters: { dangling: { true: true } } });
|
||||||
if (pruneResult.SpaceReclaimed > 0) {
|
if (pruneResult.SpaceReclaimed > 0) {
|
||||||
ctx.log.info('docker', 'Pruned dangling images after deploy', { spaceReclaimed: Math.round(pruneResult.SpaceReclaimed / 1024 / 1024) + 'MB' });
|
log.info('docker', 'Pruned dangling images after deploy', { spaceReclaimed: Math.round(pruneResult.SpaceReclaimed / 1024 / 1024) + 'MB' });
|
||||||
}
|
}
|
||||||
} catch (pruneErr) {
|
} catch (pruneErr) {
|
||||||
ctx.log.debug('docker', 'Image prune after deploy failed', { error: pruneErr.message });
|
log.debug('docker', 'Image prune after deploy failed', { error: pruneErr.message });
|
||||||
}
|
}
|
||||||
|
|
||||||
await ctx.portLockManager.releasePorts(lockId);
|
await portLockManager.releasePorts(lockId);
|
||||||
ctx.log.info('deploy', 'Port locks released', { lockId });
|
log.info('deploy', 'Port locks released', { lockId });
|
||||||
return container.id;
|
return container.id;
|
||||||
} catch (deployError) {
|
} catch (deployError) {
|
||||||
if (lockId) {
|
if (lockId) {
|
||||||
try {
|
try {
|
||||||
await ctx.portLockManager.releasePorts(lockId);
|
await portLockManager.releasePorts(lockId);
|
||||||
ctx.log.info('deploy', 'Port locks released after error', { lockId });
|
log.info('deploy', 'Port locks released after error', { lockId });
|
||||||
} catch (releaseError) {
|
} catch (releaseError) {
|
||||||
ctx.log.error('deploy', 'Failed to release port locks', { lockId, error: releaseError.message });
|
log.error('deploy', 'Failed to release port locks', { lockId, error: releaseError.message });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
throw deployError;
|
throw deployError;
|
||||||
@@ -188,7 +215,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Check for existing container before deployment
|
// Check for existing container before deployment
|
||||||
router.post('/apps/check-existing', ctx.asyncHandler(async (req, res) => {
|
router.post('/apps/check-existing', asyncHandler(async (req, res) => {
|
||||||
const { appId } = req.body;
|
const { appId } = req.body;
|
||||||
const template = ctx.APP_TEMPLATES[appId];
|
const template = ctx.APP_TEMPLATES[appId];
|
||||||
if (!template) throw new ValidationError('Invalid app template');
|
if (!template) throw new ValidationError('Invalid app template');
|
||||||
@@ -201,7 +228,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
}, 'check-existing'));
|
}, 'check-existing'));
|
||||||
|
|
||||||
// Deploy new app
|
// Deploy new app
|
||||||
router.post('/apps/deploy', ctx.asyncHandler(async (req, res) => {
|
router.post('/apps/deploy', asyncHandler(async (req, res) => {
|
||||||
const { appId, config } = req.body;
|
const { appId, config } = req.body;
|
||||||
if (!appId || typeof appId !== 'string') {
|
if (!appId || typeof appId !== 'string') {
|
||||||
throw new ValidationError('appId is required');
|
throw new ValidationError('appId is required');
|
||||||
@@ -213,10 +240,10 @@ module.exports = function(ctx, helpers) {
|
|||||||
throw new ValidationError('config.subdomain is required');
|
throw new ValidationError('config.subdomain is required');
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
ctx.log.info('deploy', 'Deploying app', { appId, subdomain: config.subdomain });
|
log.info('deploy', 'Deploying app', { appId, subdomain: config.subdomain });
|
||||||
const template = ctx.APP_TEMPLATES[appId];
|
const template = ctx.APP_TEMPLATES[appId];
|
||||||
if (!template) {
|
if (!template) {
|
||||||
await ctx.logError('app-deploy', new Error('Invalid app template'), { appId, config });
|
await logError('app-deploy', new Error('Invalid app template'), { appId, config });
|
||||||
throw new ValidationError('Invalid app template');
|
throw new ValidationError('Invalid app template');
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -226,7 +253,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
}
|
}
|
||||||
// Block reserved path names in subdirectory mode
|
// Block reserved path names in subdirectory mode
|
||||||
if (ctx.siteConfig.routingMode === 'subdirectory' && helpers.RESERVED_SUBPATHS.includes(config.subdomain)) {
|
if (ctx.siteConfig.routingMode === 'subdirectory' && helpers.RESERVED_SUBPATHS.includes(config.subdomain)) {
|
||||||
return ctx.errorResponse(res, 400, `[DC-301] "${config.subdomain}" is a reserved path and cannot be used in subdirectory mode`);
|
return errorResponse(res, 400, `[DC-301] "${config.subdomain}" is a reserved path and cannot be used in subdirectory mode`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (config.port && !isValidPort(config.port)) {
|
if (config.port && !isValidPort(config.port)) {
|
||||||
@@ -236,7 +263,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
if (!template.isStaticSite) {
|
if (!template.isStaticSite) {
|
||||||
const allowedHostnames = ['localhost', 'host.docker.internal'];
|
const allowedHostnames = ['localhost', 'host.docker.internal'];
|
||||||
if (config.ip && !validatorLib.isIP(config.ip) && !allowedHostnames.includes(config.ip)) {
|
if (config.ip && !validatorLib.isIP(config.ip) && !allowedHostnames.includes(config.ip)) {
|
||||||
return ctx.errorResponse(res, 400, '[DC-210] Invalid IP address. Use a valid IP (e.g., 192.168.x.x) or "localhost".');
|
return errorResponse(res, 400, '[DC-210] Invalid IP address. Use a valid IP (e.g., 192.168.x.x) or "localhost".');
|
||||||
}
|
}
|
||||||
if (!config.ip) config.ip = ctx.siteConfig.dnsServerIp || 'localhost';
|
if (!config.ip) config.ip = ctx.siteConfig.dnsServerIp || 'localhost';
|
||||||
} else {
|
} else {
|
||||||
@@ -246,26 +273,29 @@ module.exports = function(ctx, helpers) {
|
|||||||
|
|
||||||
let containerId;
|
let containerId;
|
||||||
let usedExisting = false;
|
let usedExisting = false;
|
||||||
|
|
||||||
|
// Process template variables for manifest (only needed for Docker containers)
|
||||||
|
const processedTemplate = template.isStaticSite ? null : helpers.processTemplateVariables(template, config);
|
||||||
|
|
||||||
if (template.isStaticSite) {
|
if (template.isStaticSite) {
|
||||||
ctx.log.info('deploy', 'Deploying static site', { appId });
|
log.info('deploy', 'Deploying static site', { appId });
|
||||||
if (appId === 'dashca') {
|
if (appId === 'dashca') {
|
||||||
await deployDashCAStaticSite(template, config);
|
await deployDashCAStaticSite(template, config);
|
||||||
containerId = null;
|
containerId = null;
|
||||||
ctx.log.info('deploy', 'Static site deployed', { appId });
|
log.info('deploy', 'Static site deployed', { appId });
|
||||||
} else {
|
} else {
|
||||||
throw new Error(`Unknown static site type: ${appId}`);
|
throw new Error(`Unknown static site type: ${appId}`);
|
||||||
}
|
}
|
||||||
} else if (config.useExisting && config.existingContainerId) {
|
} else if (config.useExisting && config.existingContainerId) {
|
||||||
containerId = config.existingContainerId;
|
containerId = config.existingContainerId;
|
||||||
usedExisting = true;
|
usedExisting = true;
|
||||||
ctx.log.info('deploy', 'Using existing container', { containerId });
|
log.info('deploy', 'Using existing container', { containerId });
|
||||||
if (config.existingPort && !config.port) config.port = config.existingPort;
|
if (config.existingPort && !config.port) config.port = config.existingPort;
|
||||||
} else {
|
} else {
|
||||||
containerId = await deployContainer(appId, config, template);
|
containerId = await deployContainer(appId, config, template);
|
||||||
ctx.log.info('deploy', 'Container deployed', { containerId });
|
log.info('deploy', 'Container deployed', { containerId });
|
||||||
await helpers.waitForHealthCheck(containerId, template.healthCheck, config.port || template.defaultPort);
|
await helpers.waitForHealthCheck(containerId, template.healthCheck, config.port || template.defaultPort);
|
||||||
ctx.log.info('deploy', 'Container is healthy', { containerId });
|
log.info('deploy', 'Container is healthy', { containerId });
|
||||||
}
|
}
|
||||||
|
|
||||||
const isSubdirectoryMode = ctx.siteConfig.routingMode === 'subdirectory' && ctx.siteConfig.domain;
|
const isSubdirectoryMode = ctx.siteConfig.routingMode === 'subdirectory' && ctx.siteConfig.domain;
|
||||||
@@ -275,11 +305,11 @@ module.exports = function(ctx, helpers) {
|
|||||||
if (config.createDns && !isSubdirectoryMode) {
|
if (config.createDns && !isSubdirectoryMode) {
|
||||||
try {
|
try {
|
||||||
await ctx.dns.createRecord(config.subdomain, config.ip);
|
await ctx.dns.createRecord(config.subdomain, config.ip);
|
||||||
ctx.log.info('deploy', 'DNS record created', { domain: ctx.buildDomain(config.subdomain), ip: config.ip });
|
log.info('deploy', 'DNS record created', { domain: ctx.buildDomain(config.subdomain), ip: config.ip });
|
||||||
} catch (dnsError) {
|
} catch (dnsError) {
|
||||||
await ctx.logError('app-deploy-dns', dnsError, { appId, subdomain: config.subdomain, ip: config.ip });
|
await logError('app-deploy-dns', dnsError, { appId, subdomain: config.subdomain, ip: config.ip });
|
||||||
dnsWarning = `DNS creation failed: ${dnsError.message}. You may need to create the DNS record manually.`;
|
dnsWarning = `DNS creation failed: ${dnsError.message}. You may need to create the DNS record manually.`;
|
||||||
ctx.log.warn('deploy', 'DNS creation failed during deploy', { error: dnsError.message });
|
log.warn('deploy', 'DNS creation failed during deploy', { error: dnsError.message });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -298,7 +328,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
}
|
}
|
||||||
caddyConfig = helpers.generateStaticSiteConfig(config.subdomain, sitePath, caddyOptions);
|
caddyConfig = helpers.generateStaticSiteConfig(config.subdomain, sitePath, caddyOptions);
|
||||||
} else {
|
} else {
|
||||||
caddyConfig = ctx.caddy.generateConfig(config.subdomain, config.ip, config.port || template.defaultPort, caddyOptions);
|
caddyConfig = caddy.generateConfig(config.subdomain, config.ip, config.port || template.defaultPort, caddyOptions);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Write Caddy config (subdirectory: inject into main block; subdomain: append as new block)
|
// Write Caddy config (subdirectory: inject into main block; subdomain: append as new block)
|
||||||
@@ -308,7 +338,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
} else {
|
} else {
|
||||||
await helpers.addCaddyConfig(config.subdomain, caddyConfig);
|
await helpers.addCaddyConfig(config.subdomain, caddyConfig);
|
||||||
}
|
}
|
||||||
ctx.log.info('deploy', 'Caddy config added', { domain: ctx.buildDomain(config.subdomain), routingMode: ctx.siteConfig.routingMode, tailscaleOnly: config.tailscaleOnly || false });
|
log.info('deploy', 'Caddy config added', { domain: ctx.buildDomain(config.subdomain), routingMode: ctx.siteConfig.routingMode, tailscaleOnly: config.tailscaleOnly || false });
|
||||||
|
|
||||||
// Build service URL based on routing mode
|
// Build service URL based on routing mode
|
||||||
const serviceUrl = ctx.buildServiceUrl(config.subdomain);
|
const serviceUrl = ctx.buildServiceUrl(config.subdomain);
|
||||||
@@ -361,7 +391,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
deployedAt: new Date().toISOString(),
|
deployedAt: new Date().toISOString(),
|
||||||
deploymentManifest
|
deploymentManifest
|
||||||
});
|
});
|
||||||
ctx.log.info('deploy', 'Service added to dashboard', { subdomain: config.subdomain });
|
log.info('deploy', 'Service added to dashboard', { subdomain: config.subdomain });
|
||||||
|
|
||||||
const response = {
|
const response = {
|
||||||
success: true, containerId, usedExisting,
|
success: true, containerId, usedExisting,
|
||||||
@@ -378,11 +408,11 @@ module.exports = function(ctx, helpers) {
|
|||||||
|
|
||||||
res.json(response);
|
res.json(response);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
await ctx.logError('app-deploy', error, { appId, config });
|
await logError('app-deploy', error, { appId, config });
|
||||||
ctx.log.error('deploy', 'Deployment failed', { appId, error: error.message });
|
log.error('deploy', 'Deployment failed', { appId, error: error.message });
|
||||||
const template = ctx.APP_TEMPLATES[appId];
|
const template = ctx.APP_TEMPLATES[appId];
|
||||||
ctx.notification.send('deploymentFailed', 'Deployment Failed', `Failed to deploy **${template?.name || appId}**.\nError: ${error.message}`, 'error');
|
ctx.notification.send('deploymentFailed', 'Deployment Failed', `Failed to deploy **${template?.name || appId}**.\nError: ${error.message}`, 'error');
|
||||||
ctx.errorResponse(res, 500, ctx.safeErrorMessage(error));
|
errorResponse(res, 500, ctx.safeErrorMessage(error));
|
||||||
}
|
}
|
||||||
}, 'apps-deploy'));
|
}, 'apps-deploy'));
|
||||||
|
|
||||||
|
|||||||
@@ -6,12 +6,23 @@ const { REGEX, DOCKER } = require('../../constants');
|
|||||||
const { exists } = require('../../fs-helpers');
|
const { exists } = require('../../fs-helpers');
|
||||||
const platformPaths = require('../../platform-paths');
|
const platformPaths = require('../../platform-paths');
|
||||||
|
|
||||||
module.exports = function(ctx) {
|
/**
|
||||||
|
* Apps helpers factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.docker - Docker client wrapper
|
||||||
|
* @param {Object} deps.caddy - Caddy client
|
||||||
|
* @param {Object} deps.credentialManager - Credential manager
|
||||||
|
* @param {Object} deps.servicesStateManager - Services state manager
|
||||||
|
* @param {Function} deps.fetchT - Timeout-wrapped fetch
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @returns {Object} Helper functions
|
||||||
|
*/
|
||||||
|
module.exports = function({ docker, caddy, credentialManager, servicesStateManager, fetchT, log }) {
|
||||||
|
|
||||||
async function checkPortConflicts(ports, excludeContainerName = null) {
|
async function checkPortConflicts(ports, excludeContainerName = null) {
|
||||||
const conflicts = [];
|
const conflicts = [];
|
||||||
try {
|
try {
|
||||||
const containers = await ctx.docker.client.listContainers({ all: true });
|
const containers = await docker.client.listContainers({ all: true });
|
||||||
for (const container of containers) {
|
for (const container of containers) {
|
||||||
if (excludeContainerName && container.Names.some(n => n === `/${excludeContainerName}`)) continue;
|
if (excludeContainerName && container.Names.some(n => n === `/${excludeContainerName}`)) continue;
|
||||||
if (container.State !== 'running') continue;
|
if (container.State !== 'running') continue;
|
||||||
@@ -27,14 +38,14 @@ module.exports = function(ctx) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('docker', 'Could not check port conflicts', { error: e.message });
|
log.warn('docker', 'Could not check port conflicts', { error: e.message });
|
||||||
}
|
}
|
||||||
return conflicts;
|
return conflicts;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function findExistingContainerByImage(template) {
|
async function findExistingContainerByImage(template) {
|
||||||
try {
|
try {
|
||||||
const containers = await ctx.docker.client.listContainers({ all: false });
|
const containers = await docker.client.listContainers({ all: false });
|
||||||
const templateImage = template.docker.image.split(':')[0];
|
const templateImage = template.docker.image.split(':')[0];
|
||||||
for (const container of containers) {
|
for (const container of containers) {
|
||||||
const containerImage = container.Image.split(':')[0];
|
const containerImage = container.Image.split(':')[0];
|
||||||
@@ -53,7 +64,7 @@ module.exports = function(ctx) {
|
|||||||
}
|
}
|
||||||
return null;
|
return null;
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('docker', 'Could not check for existing containers', { error: e.message });
|
log.warn('docker', 'Could not check for existing containers', { error: e.message });
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -140,7 +151,7 @@ module.exports = function(ctx) {
|
|||||||
normalizedHost === root || normalizedHost.startsWith(root + path.sep)
|
normalizedHost === root || normalizedHost.startsWith(root + path.sep)
|
||||||
);
|
);
|
||||||
if (!isAllowed) {
|
if (!isAllowed) {
|
||||||
ctx.log.warn('deploy', 'Custom volume host path rejected', { hostPath: override.hostPath, allowed: allowedRoots });
|
log.warn('deploy', 'Custom volume host path rejected', { hostPath: override.hostPath, allowed: allowedRoots });
|
||||||
return vol; // Keep original volume, don't apply unsafe override
|
return vol; // Keep original volume, don't apply unsafe override
|
||||||
}
|
}
|
||||||
return `${toDockerDesktopPath(override.hostPath)}:${containerPath}`;
|
return `${toDockerDesktopPath(override.hostPath)}:${containerPath}`;
|
||||||
@@ -243,39 +254,39 @@ module.exports = function(ctx) {
|
|||||||
|
|
||||||
for (let i = 0; i < maxAttempts; i++) {
|
for (let i = 0; i < maxAttempts; i++) {
|
||||||
try {
|
try {
|
||||||
const container = ctx.docker.client.getContainer(containerId);
|
const container = docker.client.getContainer(containerId);
|
||||||
const info = await container.inspect();
|
const info = await container.inspect();
|
||||||
if (info.State.Running) {
|
if (info.State.Running) {
|
||||||
if (info.State.Health) {
|
if (info.State.Health) {
|
||||||
if (info.State.Health.Status === 'healthy') {
|
if (info.State.Health.Status === 'healthy') {
|
||||||
ctx.log.info('docker', 'Container is healthy (Docker health check)', { containerId });
|
log.info('docker', 'Container is healthy (Docker health check)', { containerId });
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
} else if (healthPath && port && httpCheckFailed < 5) {
|
} else if (healthPath && port && httpCheckFailed < 5) {
|
||||||
try {
|
try {
|
||||||
const response = await ctx.fetchT(`http://localhost:${port}${healthPath}`, {
|
const response = await fetchT(`http://localhost:${port}${healthPath}`, {
|
||||||
signal: AbortSignal.timeout(3000), redirect: 'manual'
|
signal: AbortSignal.timeout(3000), redirect: 'manual'
|
||||||
});
|
});
|
||||||
if (response.ok || (response.status >= 300 && response.status < 400)) {
|
if (response.ok || (response.status >= 300 && response.status < 400)) {
|
||||||
ctx.log.info('docker', 'Health check passed', { containerId, status: response.status });
|
log.info('docker', 'Health check passed', { containerId, status: response.status });
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
httpCheckFailed++;
|
httpCheckFailed++;
|
||||||
ctx.log.debug('docker', 'HTTP health check failed', { attempt: httpCheckFailed, error: e.message });
|
log.debug('docker', 'HTTP health check failed', { attempt: httpCheckFailed, error: e.message });
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
if (i >= 5) {
|
if (i >= 5) {
|
||||||
ctx.log.info('docker', 'Container is running', { containerId, waitedSeconds: i * delay / 1000 });
|
log.info('docker', 'Container is running', { containerId, waitedSeconds: i * delay / 1000 });
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.debug('docker', 'Health check attempt failed', { attempt: i + 1, error: e.message });
|
log.debug('docker', 'Health check attempt failed', { attempt: i + 1, error: e.message });
|
||||||
}
|
}
|
||||||
if (i < maxAttempts - 1) {
|
if (i < maxAttempts - 1) {
|
||||||
ctx.log.debug('docker', 'Waiting for container to be healthy', { attempt: i + 1, maxAttempts });
|
log.debug('docker', 'Waiting for container to be healthy', { attempt: i + 1, maxAttempts });
|
||||||
await new Promise(resolve => setTimeout(resolve, delay));
|
await new Promise(resolve => setTimeout(resolve, delay));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -284,15 +295,15 @@ module.exports = function(ctx) {
|
|||||||
|
|
||||||
async function addCaddyConfig(subdomain, config) {
|
async function addCaddyConfig(subdomain, config) {
|
||||||
const domain = ctx.buildDomain(subdomain);
|
const domain = ctx.buildDomain(subdomain);
|
||||||
const existing = await ctx.caddy.read();
|
const existing = await caddy.read();
|
||||||
if (existing.includes(`${domain} {`)) {
|
if (existing.includes(`${domain} {`)) {
|
||||||
ctx.log.info('caddy', 'Caddy config already exists, skipping add', { domain });
|
log.info('caddy', 'Caddy config already exists, skipping add', { domain });
|
||||||
await ctx.caddy.reload(existing);
|
await caddy.reload(existing);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const result = await ctx.caddy.modify(c => c + `\n${config}\n`);
|
const result = await caddy.modify(c => c + `\n${config}\n`);
|
||||||
if (!result.success) throw new Error(`[DC-303] Failed to add Caddy config for ${domain}: ${result.error}`);
|
if (!result.success) throw new Error(`[DC-303] Failed to add Caddy config for ${domain}: ${result.error}`);
|
||||||
await ctx.caddy.verifySite(domain);
|
await caddy.verifySite(domain);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Reserved paths that cannot be used as subpath names in subdirectory mode
|
// Reserved paths that cannot be used as subpath names in subdirectory mode
|
||||||
@@ -303,7 +314,7 @@ module.exports = function(ctx) {
|
|||||||
async function ensureMainDomainBlock() {
|
async function ensureMainDomainBlock() {
|
||||||
if (ctx.siteConfig.routingMode !== 'subdirectory' || !ctx.siteConfig.domain) return;
|
if (ctx.siteConfig.routingMode !== 'subdirectory' || !ctx.siteConfig.domain) return;
|
||||||
|
|
||||||
const content = await ctx.caddy.read();
|
const content = await caddy.read();
|
||||||
const domain = ctx.siteConfig.domain;
|
const domain = ctx.siteConfig.domain;
|
||||||
const ROUTE_MARKER = '# === DashCaddy App Routes ===';
|
const ROUTE_MARKER = '# === DashCaddy App Routes ===';
|
||||||
|
|
||||||
@@ -312,7 +323,7 @@ module.exports = function(ctx) {
|
|||||||
|
|
||||||
// Domain block exists but lacks markers — inject them
|
// Domain block exists but lacks markers — inject them
|
||||||
if (content.includes(`${domain} {`)) {
|
if (content.includes(`${domain} {`)) {
|
||||||
const result = await ctx.caddy.modify(c => {
|
const result = await caddy.modify(c => {
|
||||||
// Insert markers before the final catch-all handle block inside the domain block
|
// Insert markers before the final catch-all handle block inside the domain block
|
||||||
const domainStart = c.indexOf(`${domain} {`);
|
const domainStart = c.indexOf(`${domain} {`);
|
||||||
// Find standalone "handle {" (catch-all SPA fallback) — match tabs or spaces
|
// Find standalone "handle {" (catch-all SPA fallback) — match tabs or spaces
|
||||||
@@ -325,7 +336,7 @@ module.exports = function(ctx) {
|
|||||||
return c.slice(0, handleIdx) + markerBlock + c.slice(handleIdx);
|
return c.slice(0, handleIdx) + markerBlock + c.slice(handleIdx);
|
||||||
});
|
});
|
||||||
if (result.success) {
|
if (result.success) {
|
||||||
ctx.log.info('caddy', 'Injected route markers into existing domain block', { domain });
|
log.info('caddy', 'Injected route markers into existing domain block', { domain });
|
||||||
}
|
}
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -335,9 +346,9 @@ module.exports = function(ctx) {
|
|||||||
const apiPort = process.env.PORT || 3001;
|
const apiPort = process.env.PORT || 3001;
|
||||||
const block = `\n${domain} {\n root * ${dashboardRoot}\n encode gzip\n\n handle /api/* {\n reverse_proxy localhost:${apiPort}\n }\n\n handle /probe/* {\n reverse_proxy localhost:${apiPort}\n }\n\n ${ROUTE_MARKER}\n # === End App Routes ===\n\n handle {\n @notFile not file {path}\n rewrite @notFile /index.html\n file_server\n }\n}\n`;
|
const block = `\n${domain} {\n root * ${dashboardRoot}\n encode gzip\n\n handle /api/* {\n reverse_proxy localhost:${apiPort}\n }\n\n handle /probe/* {\n reverse_proxy localhost:${apiPort}\n }\n\n ${ROUTE_MARKER}\n # === End App Routes ===\n\n handle {\n @notFile not file {path}\n rewrite @notFile /index.html\n file_server\n }\n}\n`;
|
||||||
|
|
||||||
const result = await ctx.caddy.modify(c => c + block);
|
const result = await caddy.modify(c => c + block);
|
||||||
if (result.success) {
|
if (result.success) {
|
||||||
ctx.log.info('caddy', 'Created main domain block with route markers', { domain });
|
log.info('caddy', 'Created main domain block with route markers', { domain });
|
||||||
} else {
|
} else {
|
||||||
throw new Error(`[DC-303] Failed to create main domain block for ${domain}: ${result.error}`);
|
throw new Error(`[DC-303] Failed to create main domain block for ${domain}: ${result.error}`);
|
||||||
}
|
}
|
||||||
@@ -349,9 +360,9 @@ module.exports = function(ctx) {
|
|||||||
const endMarker = `# --- End: ${subdomain} ---`;
|
const endMarker = `# --- End: ${subdomain} ---`;
|
||||||
const END_ROUTE_MARKER = '# === End App Routes ===';
|
const END_ROUTE_MARKER = '# === End App Routes ===';
|
||||||
|
|
||||||
const result = await ctx.caddy.modify(content => {
|
const result = await caddy.modify(content => {
|
||||||
if (content.includes(marker)) {
|
if (content.includes(marker)) {
|
||||||
ctx.log.info('caddy', 'Subpath config already exists, skipping', { subdomain });
|
log.info('caddy', 'Subpath config already exists, skipping', { subdomain });
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -378,7 +389,7 @@ module.exports = function(ctx) {
|
|||||||
const marker = `# --- DashCaddy: ${subdomain} ---`;
|
const marker = `# --- DashCaddy: ${subdomain} ---`;
|
||||||
const endMarker = `# --- End: ${subdomain} ---`;
|
const endMarker = `# --- End: ${subdomain} ---`;
|
||||||
|
|
||||||
return await ctx.caddy.modify(content => {
|
return await caddy.modify(content => {
|
||||||
const startIdx = content.indexOf(marker);
|
const startIdx = content.indexOf(marker);
|
||||||
if (startIdx === -1) return null;
|
if (startIdx === -1) return null;
|
||||||
|
|
||||||
|
|||||||
@@ -5,14 +5,44 @@ const initRemoval = require('./removal');
|
|||||||
const initTemplates = require('./templates');
|
const initTemplates = require('./templates');
|
||||||
const initRestore = require('./restore');
|
const initRestore = require('./restore');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Apps routes aggregator
|
||||||
|
* Assembles all apps sub-routes with their dependencies
|
||||||
|
* @param {Object} ctx - Application context (for backward compatibility)
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
module.exports = function(ctx) {
|
module.exports = function(ctx) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
const helpers = initHelpers(ctx);
|
|
||||||
|
|
||||||
router.use(initDeploy(ctx, helpers));
|
// Extract dependencies from context
|
||||||
router.use(initRemoval(ctx, helpers));
|
const deps = {
|
||||||
router.use(initTemplates(ctx, helpers));
|
docker: ctx.docker,
|
||||||
router.use(initRestore(ctx, helpers));
|
caddy: ctx.caddy,
|
||||||
|
credentialManager: ctx.credentialManager,
|
||||||
|
servicesStateManager: ctx.servicesStateManager,
|
||||||
|
portLockManager: ctx.portLockManager,
|
||||||
|
asyncHandler: ctx.asyncHandler,
|
||||||
|
errorResponse: ctx.errorResponse,
|
||||||
|
log: ctx.log,
|
||||||
|
// Additional context properties needed by routes
|
||||||
|
APP_TEMPLATES: ctx.APP_TEMPLATES,
|
||||||
|
siteConfig: ctx.siteConfig,
|
||||||
|
buildDomain: ctx.buildDomain,
|
||||||
|
buildServiceUrl: ctx.buildServiceUrl,
|
||||||
|
addServiceToConfig: ctx.addServiceToConfig,
|
||||||
|
dns: ctx.dns,
|
||||||
|
notification: ctx.notification,
|
||||||
|
safeErrorMessage: ctx.safeErrorMessage
|
||||||
|
};
|
||||||
|
|
||||||
|
// Initialize helpers with dependencies
|
||||||
|
const helpers = initHelpers(deps);
|
||||||
|
|
||||||
|
// Mount sub-routes with explicit dependencies
|
||||||
|
router.use(initDeploy({ ...deps, helpers }));
|
||||||
|
router.use(initRemoval({ ...deps, helpers }));
|
||||||
|
router.use(initTemplates({ ...deps, helpers }));
|
||||||
|
router.use(initRestore({ ...deps, helpers }));
|
||||||
|
|
||||||
return router;
|
return router;
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,35 +1,46 @@
|
|||||||
const express = require('express');
|
const express = require('express');
|
||||||
const { exists } = require('../../fs-helpers');
|
const { exists } = require('../../fs-helpers');
|
||||||
|
|
||||||
module.exports = function(ctx, helpers) {
|
module.exports = function({ docker, caddy, servicesStateManager, asyncHandler, log, helpers }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
// Remove deployed app
|
// Remove deployed app
|
||||||
router.delete('/apps/:appId', ctx.asyncHandler(async (req, res) => {
|
/**
|
||||||
|
* Apps removal routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.docker - Docker client wrapper
|
||||||
|
* @param {Object} deps.caddy - Caddy client
|
||||||
|
* @param {Object} deps.servicesStateManager - Services state manager
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @param {Object} deps.helpers - Apps helpers module
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
router.delete('/apps/:appId', asyncHandler(async (req, res) => {
|
||||||
const { appId } = req.params;
|
const { appId } = req.params;
|
||||||
const { containerId, subdomain, ip, deleteContainer } = req.query;
|
const { containerId, subdomain, ip, deleteContainer } = req.query;
|
||||||
const shouldDeleteContainer = deleteContainer === 'true';
|
const shouldDeleteContainer = deleteContainer === 'true';
|
||||||
const results = { container: null, dns: null, caddy: null, service: null };
|
const results = { container: null, dns: null, caddy: null, service: null };
|
||||||
|
|
||||||
try {
|
try {
|
||||||
ctx.log.info('deploy', 'Removing app', { appId, containerId, subdomain, deleteContainer: shouldDeleteContainer });
|
log.info('deploy', 'Removing app', { appId, containerId, subdomain, deleteContainer: shouldDeleteContainer });
|
||||||
|
|
||||||
if (containerId && shouldDeleteContainer) {
|
if (containerId && shouldDeleteContainer) {
|
||||||
try {
|
try {
|
||||||
const container = ctx.docker.client.getContainer(containerId);
|
const container = docker.client.getContainer(containerId);
|
||||||
try { await container.stop(); ctx.log.info('docker', 'Container stopped', { containerId }); }
|
try { await container.stop(); log.info('docker', 'Container stopped', { containerId }); }
|
||||||
catch (stopError) { ctx.log.debug('docker', 'Container stop note', { containerId, note: stopError.message }); }
|
catch (stopError) { log.debug('docker', 'Container stop note', { containerId, note: stopError.message }); }
|
||||||
await container.remove({ force: true });
|
await container.remove({ force: true });
|
||||||
results.container = 'removed';
|
results.container = 'removed';
|
||||||
ctx.log.info('docker', 'Container removed', { containerId });
|
log.info('docker', 'Container removed', { containerId });
|
||||||
// Prune dangling images after removal
|
// Prune dangling images after removal
|
||||||
try {
|
try {
|
||||||
const pruneResult = await ctx.docker.client.pruneImages({ filters: { dangling: { true: true } } });
|
const pruneResult = await docker.client.pruneImages({ filters: { dangling: { true: true } } });
|
||||||
if (pruneResult.SpaceReclaimed > 0) {
|
if (pruneResult.SpaceReclaimed > 0) {
|
||||||
ctx.log.info('docker', 'Pruned dangling images after removal', { spaceReclaimed: Math.round(pruneResult.SpaceReclaimed / 1024 / 1024) + 'MB' });
|
log.info('docker', 'Pruned dangling images after removal', { spaceReclaimed: Math.round(pruneResult.SpaceReclaimed / 1024 / 1024) + 'MB' });
|
||||||
}
|
}
|
||||||
} catch (pruneErr) {
|
} catch (pruneErr) {
|
||||||
ctx.log.debug('docker', 'Image prune after removal failed', { error: pruneErr.message });
|
log.debug('docker', 'Image prune after removal failed', { error: pruneErr.message });
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
results.container = error.message.includes('no such container') ? 'already removed' : error.message;
|
results.container = error.message.includes('no such container') ? 'already removed' : error.message;
|
||||||
@@ -53,7 +64,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
token: ctx.dns.getToken(), domain, type: 'A', ipAddress: recordIp
|
token: ctx.dns.getToken(), domain, type: 'A', ipAddress: recordIp
|
||||||
});
|
});
|
||||||
results.dns = dnsResult.status === 'ok' ? 'deleted' : (dnsResult.errorMessage || 'failed');
|
results.dns = dnsResult.status === 'ok' ? 'deleted' : (dnsResult.errorMessage || 'failed');
|
||||||
ctx.log.info('dns', 'DNS record removal', { result: results.dns });
|
log.info('dns', 'DNS record removal', { result: results.dns });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
results.dns = error.message;
|
results.dns = error.message;
|
||||||
}
|
}
|
||||||
@@ -66,7 +77,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
if (shouldDeleteContainer && subdomain) {
|
if (shouldDeleteContainer && subdomain) {
|
||||||
try {
|
try {
|
||||||
// Check if this service was deployed in subdirectory mode
|
// Check if this service was deployed in subdirectory mode
|
||||||
const services = await ctx.servicesStateManager.read();
|
const services = await servicesStateManager.read();
|
||||||
const serviceList = Array.isArray(services) ? services : [];
|
const serviceList = Array.isArray(services) ? services : [];
|
||||||
const service = serviceList.find(s => s.id === subdomain);
|
const service = serviceList.find(s => s.id === subdomain);
|
||||||
|
|
||||||
@@ -79,14 +90,14 @@ module.exports = function(ctx, helpers) {
|
|||||||
const domain = ctx.buildDomain(subdomain);
|
const domain = ctx.buildDomain(subdomain);
|
||||||
const escapedDomain = domain.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
const escapedDomain = domain.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||||
const siteBlockRegex = new RegExp(`\\n?${escapedDomain}\\s*\\{[^{}]*(?:\\{[^{}]*(?:\\{[^{}]*\\}[^{}]*)*\\}[^{}]*)*\\}\\s*`, 'g');
|
const siteBlockRegex = new RegExp(`\\n?${escapedDomain}\\s*\\{[^{}]*(?:\\{[^{}]*(?:\\{[^{}]*\\}[^{}]*)*\\}[^{}]*)*\\}\\s*`, 'g');
|
||||||
const caddyResult = await ctx.caddy.modify(currentContent => {
|
const caddyResult = await caddy.modify(currentContent => {
|
||||||
const replaced = currentContent.replace(siteBlockRegex, '\n');
|
const replaced = currentContent.replace(siteBlockRegex, '\n');
|
||||||
if (replaced.length === currentContent.length) return null;
|
if (replaced.length === currentContent.length) return null;
|
||||||
return replaced.replace(/\n{3,}/g, '\n\n');
|
return replaced.replace(/\n{3,}/g, '\n\n');
|
||||||
});
|
});
|
||||||
results.caddy = caddyResult.success ? 'removed' : (caddyResult.rolledBack ? 'removed (reload failed)' : 'not found');
|
results.caddy = caddyResult.success ? 'removed' : (caddyResult.rolledBack ? 'removed (reload failed)' : 'not found');
|
||||||
}
|
}
|
||||||
ctx.log.info('caddy', 'Caddy config removal', { result: results.caddy });
|
log.info('caddy', 'Caddy config removal', { result: results.caddy });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
results.caddy = error.message;
|
results.caddy = error.message;
|
||||||
}
|
}
|
||||||
@@ -97,7 +108,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
try {
|
try {
|
||||||
if (await exists(ctx.SERVICES_FILE)) {
|
if (await exists(ctx.SERVICES_FILE)) {
|
||||||
let removed = false;
|
let removed = false;
|
||||||
await ctx.servicesStateManager.update(services => {
|
await servicesStateManager.update(services => {
|
||||||
const initialLength = services.length;
|
const initialLength = services.length;
|
||||||
const filtered = services.filter(s => s.id !== subdomain);
|
const filtered = services.filter(s => s.id !== subdomain);
|
||||||
removed = filtered.length !== initialLength;
|
removed = filtered.length !== initialLength;
|
||||||
@@ -105,15 +116,15 @@ module.exports = function(ctx, helpers) {
|
|||||||
});
|
});
|
||||||
results.service = removed ? 'removed' : 'not found';
|
results.service = removed ? 'removed' : 'not found';
|
||||||
}
|
}
|
||||||
ctx.log.info('deploy', 'Service config removal', { result: results.service });
|
log.info('deploy', 'Service config removal', { result: results.service });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
results.service = error.message;
|
results.service = error.message;
|
||||||
}
|
}
|
||||||
|
|
||||||
res.json({ success: true, message: `App ${appId} removal completed`, results });
|
res.json({ success: true, message: `App ${appId} removal completed`, results });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
await ctx.logError('app-removal', error);
|
await logError('app-removal', error);
|
||||||
ctx.errorResponse(res, 500, ctx.safeErrorMessage(error), { results });
|
errorResponse(res, 500, ctx.safeErrorMessage(error), { results });
|
||||||
}
|
}
|
||||||
}, 'apps-delete'));
|
}, 'apps-delete'));
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,18 @@
|
|||||||
const express = require('express');
|
const express = require('express');
|
||||||
const { DOCKER } = require('../../constants');
|
const { DOCKER } = require('../../constants');
|
||||||
|
|
||||||
module.exports = function(ctx, helpers) {
|
/**
|
||||||
|
* Apps restore routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.docker - Docker client wrapper
|
||||||
|
* @param {Object} deps.caddy - Caddy client
|
||||||
|
* @param {Object} deps.servicesStateManager - Services state manager
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @param {Object} deps.helpers - Apps helpers module
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
module.exports = function({ docker, caddy, servicesStateManager, asyncHandler, log, helpers }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -9,16 +20,16 @@ module.exports = function(ctx, helpers) {
|
|||||||
* Pulls image, creates container, starts it, recreates Caddy config.
|
* Pulls image, creates container, starts it, recreates Caddy config.
|
||||||
* Skips if container is already running.
|
* Skips if container is already running.
|
||||||
*/
|
*/
|
||||||
router.post('/apps/:appId/restore', ctx.asyncHandler(async (req, res) => {
|
router.post('/apps/:appId/restore', asyncHandler(async (req, res) => {
|
||||||
const { appId } = req.params;
|
const { appId } = req.params;
|
||||||
const services = await ctx.servicesStateManager.read();
|
const services = await servicesStateManager.read();
|
||||||
const service = services.find(s => s.id === appId);
|
const service = services.find(s => s.id === appId);
|
||||||
|
|
||||||
if (!service) {
|
if (!service) {
|
||||||
return ctx.errorResponse(res, 404, `Service "${appId}" not found in services.json`);
|
return errorResponse(res, 404, `Service "${appId}" not found in services.json`);
|
||||||
}
|
}
|
||||||
if (!service.deploymentManifest) {
|
if (!service.deploymentManifest) {
|
||||||
return ctx.errorResponse(res, 400, `Service "${appId}" has no deployment manifest — it was deployed before the manifest feature was added. Redeploy it manually to create a manifest.`);
|
return errorResponse(res, 400, `Service "${appId}" has no deployment manifest — it was deployed before the manifest feature was added. Redeploy it manually to create a manifest.`);
|
||||||
}
|
}
|
||||||
|
|
||||||
const result = await restoreService(service);
|
const result = await restoreService(service);
|
||||||
@@ -29,8 +40,8 @@ module.exports = function(ctx, helpers) {
|
|||||||
* Restore all services that have deployment manifests.
|
* Restore all services that have deployment manifests.
|
||||||
* Returns per-service results.
|
* Returns per-service results.
|
||||||
*/
|
*/
|
||||||
router.post('/apps/restore-all', ctx.asyncHandler(async (req, res) => {
|
router.post('/apps/restore-all', asyncHandler(async (req, res) => {
|
||||||
const services = await ctx.servicesStateManager.read();
|
const services = await servicesStateManager.read();
|
||||||
const restoreable = services.filter(s => s.deploymentManifest);
|
const restoreable = services.filter(s => s.deploymentManifest);
|
||||||
|
|
||||||
if (restoreable.length === 0) {
|
if (restoreable.length === 0) {
|
||||||
@@ -70,8 +81,8 @@ module.exports = function(ctx, helpers) {
|
|||||||
/**
|
/**
|
||||||
* List all services and their restore status.
|
* List all services and their restore status.
|
||||||
*/
|
*/
|
||||||
router.get('/apps/restore-status', ctx.asyncHandler(async (req, res) => {
|
router.get('/apps/restore-status', asyncHandler(async (req, res) => {
|
||||||
const services = await ctx.servicesStateManager.read();
|
const services = await servicesStateManager.read();
|
||||||
const status = [];
|
const status = [];
|
||||||
|
|
||||||
for (const service of services) {
|
for (const service of services) {
|
||||||
@@ -87,7 +98,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
// Check if container is currently running
|
// Check if container is currently running
|
||||||
if (service.containerId) {
|
if (service.containerId) {
|
||||||
try {
|
try {
|
||||||
const container = ctx.docker.client.getContainer(service.containerId);
|
const container = docker.client.getContainer(service.containerId);
|
||||||
const info = await container.inspect();
|
const info = await container.inspect();
|
||||||
entry.containerRunning = info.State.Running;
|
entry.containerRunning = info.State.Running;
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
@@ -108,11 +119,11 @@ module.exports = function(ctx, helpers) {
|
|||||||
const manifest = service.deploymentManifest;
|
const manifest = service.deploymentManifest;
|
||||||
const template = ctx.APP_TEMPLATES[manifest.templateId];
|
const template = ctx.APP_TEMPLATES[manifest.templateId];
|
||||||
|
|
||||||
ctx.log.info('restore', `Restoring service: ${service.name}`, { id: service.id, templateId: manifest.templateId });
|
log.info('restore', `Restoring service: ${service.name}`, { id: service.id, templateId: manifest.templateId });
|
||||||
|
|
||||||
// Static sites: just recreate Caddy config
|
// Static sites: just recreate Caddy config
|
||||||
if (template?.isStaticSite) {
|
if (template?.isStaticSite) {
|
||||||
ctx.log.info('restore', `Restoring static site Caddy config: ${service.name}`);
|
log.info('restore', `Restoring static site Caddy config: ${service.name}`);
|
||||||
const caddyOptions = {
|
const caddyOptions = {
|
||||||
tailscaleOnly: manifest.caddy.tailscaleOnly,
|
tailscaleOnly: manifest.caddy.tailscaleOnly,
|
||||||
allowedIPs: manifest.caddy.allowedIPs,
|
allowedIPs: manifest.caddy.allowedIPs,
|
||||||
@@ -132,10 +143,10 @@ module.exports = function(ctx, helpers) {
|
|||||||
// Docker container: check if already running
|
// Docker container: check if already running
|
||||||
if (service.containerId) {
|
if (service.containerId) {
|
||||||
try {
|
try {
|
||||||
const existing = ctx.docker.client.getContainer(service.containerId);
|
const existing = docker.client.getContainer(service.containerId);
|
||||||
const info = await existing.inspect();
|
const info = await existing.inspect();
|
||||||
if (info.State.Running) {
|
if (info.State.Running) {
|
||||||
ctx.log.info('restore', `Container already running, skipping: ${service.name}`);
|
log.info('restore', `Container already running, skipping: ${service.name}`);
|
||||||
return {
|
return {
|
||||||
id: service.id,
|
id: service.id,
|
||||||
name: service.name,
|
name: service.name,
|
||||||
@@ -151,11 +162,11 @@ module.exports = function(ctx, helpers) {
|
|||||||
// Also check by name (container ID may have changed)
|
// Also check by name (container ID may have changed)
|
||||||
const containerName = `${DOCKER.CONTAINER_PREFIX}${manifest.config.subdomain}`;
|
const containerName = `${DOCKER.CONTAINER_PREFIX}${manifest.config.subdomain}`;
|
||||||
try {
|
try {
|
||||||
const byName = ctx.docker.client.getContainer(containerName);
|
const byName = docker.client.getContainer(containerName);
|
||||||
const info = await byName.inspect();
|
const info = await byName.inspect();
|
||||||
if (info.State.Running) {
|
if (info.State.Running) {
|
||||||
// Update the service entry with the current container ID
|
// Update the service entry with the current container ID
|
||||||
await ctx.servicesStateManager.update(services => {
|
await servicesStateManager.update(services => {
|
||||||
const svc = services.find(s => s.id === service.id);
|
const svc = services.find(s => s.id === service.id);
|
||||||
if (svc) svc.containerId = info.Id;
|
if (svc) svc.containerId = info.Id;
|
||||||
return services;
|
return services;
|
||||||
@@ -183,18 +194,18 @@ module.exports = function(ctx, helpers) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Pull image
|
// Pull image
|
||||||
ctx.log.info('restore', `Pulling image: ${manifest.container.image}`);
|
log.info('restore', `Pulling image: ${manifest.container.image}`);
|
||||||
try {
|
try {
|
||||||
await ctx.docker.pull(manifest.container.image);
|
await docker.pull(manifest.container.image);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
// Check if image exists locally
|
// Check if image exists locally
|
||||||
const images = await ctx.docker.client.listImages({
|
const images = await docker.client.listImages({
|
||||||
filters: { reference: [manifest.container.image] }
|
filters: { reference: [manifest.container.image] }
|
||||||
});
|
});
|
||||||
if (images.length === 0) {
|
if (images.length === 0) {
|
||||||
throw new Error(`Failed to pull image ${manifest.container.image}: ${e.message}`);
|
throw new Error(`Failed to pull image ${manifest.container.image}: ${e.message}`);
|
||||||
}
|
}
|
||||||
ctx.log.warn('restore', `Pull failed, using local image: ${manifest.container.image}`);
|
log.warn('restore', `Pull failed, using local image: ${manifest.container.image}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build container config from manifest
|
// Build container config from manifest
|
||||||
@@ -231,10 +242,10 @@ module.exports = function(ctx, helpers) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Create and start container
|
// Create and start container
|
||||||
ctx.log.info('restore', `Creating container: ${containerName}`);
|
log.info('restore', `Creating container: ${containerName}`);
|
||||||
const container = await ctx.docker.client.createContainer(containerConfig);
|
const container = await docker.client.createContainer(containerConfig);
|
||||||
await container.start();
|
await container.start();
|
||||||
ctx.log.info('restore', `Container started: ${containerName}`);
|
log.info('restore', `Container started: ${containerName}`);
|
||||||
|
|
||||||
// Recreate Caddy config
|
// Recreate Caddy config
|
||||||
const port = manifest.config.port;
|
const port = manifest.config.port;
|
||||||
@@ -245,19 +256,19 @@ module.exports = function(ctx, helpers) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
if (manifest.caddy.routingMode === 'subdirectory') {
|
if (manifest.caddy.routingMode === 'subdirectory') {
|
||||||
const caddyConfig = ctx.caddy.generateConfig(manifest.config.subdomain, manifest.config.ip, port, caddyOptions);
|
const caddyConfig = caddy.generateConfig(manifest.config.subdomain, manifest.config.ip, port, caddyOptions);
|
||||||
try {
|
try {
|
||||||
await helpers.ensureMainDomainBlock();
|
await helpers.ensureMainDomainBlock();
|
||||||
await helpers.addSubpathConfig(manifest.config.subdomain, caddyConfig);
|
await helpers.addSubpathConfig(manifest.config.subdomain, caddyConfig);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('restore', `Caddy config may already exist: ${e.message}`);
|
log.warn('restore', `Caddy config may already exist: ${e.message}`);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
const caddyConfig = ctx.caddy.generateConfig(manifest.config.subdomain, manifest.config.ip, port, caddyOptions);
|
const caddyConfig = caddy.generateConfig(manifest.config.subdomain, manifest.config.ip, port, caddyOptions);
|
||||||
try {
|
try {
|
||||||
await helpers.addCaddyConfig(manifest.config.subdomain, caddyConfig);
|
await helpers.addCaddyConfig(manifest.config.subdomain, caddyConfig);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('restore', `Caddy config may already exist: ${e.message}`);
|
log.warn('restore', `Caddy config may already exist: ${e.message}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -265,14 +276,14 @@ module.exports = function(ctx, helpers) {
|
|||||||
if (manifest.config.createDns && manifest.caddy.routingMode !== 'subdirectory') {
|
if (manifest.config.createDns && manifest.caddy.routingMode !== 'subdirectory') {
|
||||||
try {
|
try {
|
||||||
await ctx.dns.createRecord(manifest.config.subdomain, manifest.config.ip);
|
await ctx.dns.createRecord(manifest.config.subdomain, manifest.config.ip);
|
||||||
ctx.log.info('restore', 'DNS record recreated', { subdomain: manifest.config.subdomain });
|
log.info('restore', 'DNS record recreated', { subdomain: manifest.config.subdomain });
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('restore', `DNS recreation failed: ${e.message}`);
|
log.warn('restore', `DNS recreation failed: ${e.message}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update the service entry with the new container ID
|
// Update the service entry with the new container ID
|
||||||
await ctx.servicesStateManager.update(services => {
|
await servicesStateManager.update(services => {
|
||||||
const svc = services.find(s => s.id === service.id);
|
const svc = services.find(s => s.id === service.id);
|
||||||
if (svc) {
|
if (svc) {
|
||||||
svc.containerId = container.id;
|
svc.containerId = container.id;
|
||||||
|
|||||||
@@ -1,12 +1,20 @@
|
|||||||
const express = require('express');
|
const express = require('express');
|
||||||
const { exists } = require('../../fs-helpers');
|
const { exists } = require('../../fs-helpers');
|
||||||
|
/**
|
||||||
|
* Apps templates routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.servicesStateManager - Services state manager
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Object} deps.helpers - Apps helpers module
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
const { REGEX } = require('../../constants');
|
const { REGEX } = require('../../constants');
|
||||||
|
|
||||||
module.exports = function(ctx, helpers) {
|
module.exports = function({ servicesStateManager, asyncHandler, helpers }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
// Get available app templates
|
// Get available app templates
|
||||||
router.get('/apps/templates', ctx.asyncHandler(async (req, res) => {
|
router.get('/apps/templates', asyncHandler(async (req, res) => {
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
templates: ctx.APP_TEMPLATES,
|
templates: ctx.APP_TEMPLATES,
|
||||||
@@ -16,7 +24,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
}, 'apps-templates'));
|
}, 'apps-templates'));
|
||||||
|
|
||||||
// Get specific app template
|
// Get specific app template
|
||||||
router.get('/apps/templates/:appId', ctx.asyncHandler(async (req, res) => {
|
router.get('/apps/templates/:appId', asyncHandler(async (req, res) => {
|
||||||
const { appId } = req.params;
|
const { appId } = req.params;
|
||||||
const template = ctx.APP_TEMPLATES[appId];
|
const template = ctx.APP_TEMPLATES[appId];
|
||||||
if (!template) {
|
if (!template) {
|
||||||
@@ -27,7 +35,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
}, 'apps-template-detail'));
|
}, 'apps-template-detail'));
|
||||||
|
|
||||||
// Check port availability
|
// Check port availability
|
||||||
router.get('/apps/ports/:port/check', ctx.asyncHandler(async (req, res) => {
|
router.get('/apps/ports/:port/check', asyncHandler(async (req, res) => {
|
||||||
const port = req.params.port;
|
const port = req.params.port;
|
||||||
const conflicts = await helpers.checkPortConflicts([port]);
|
const conflicts = await helpers.checkPortConflicts([port]);
|
||||||
if (conflicts.length > 0) {
|
if (conflicts.length > 0) {
|
||||||
@@ -39,21 +47,21 @@ module.exports = function(ctx, helpers) {
|
|||||||
}, 'check-port'));
|
}, 'check-port'));
|
||||||
|
|
||||||
// Get suggested available port
|
// Get suggested available port
|
||||||
router.get('/apps/ports/:basePort/suggest', ctx.asyncHandler(async (req, res) => {
|
router.get('/apps/ports/:basePort/suggest', asyncHandler(async (req, res) => {
|
||||||
const basePort = parseInt(req.params.basePort) || 8080;
|
const basePort = parseInt(req.params.basePort) || 8080;
|
||||||
const maxAttempts = 100;
|
const maxAttempts = 100;
|
||||||
const usedPorts = await ctx.docker.getUsedPorts();
|
const usedPorts = await docker.getUsedPorts();
|
||||||
for (let port = basePort; port < basePort + maxAttempts; port++) {
|
for (let port = basePort; port < basePort + maxAttempts; port++) {
|
||||||
if (!usedPorts.has(port)) {
|
if (!usedPorts.has(port)) {
|
||||||
res.json({ success: true, suggestedPort: port, basePort });
|
res.json({ success: true, suggestedPort: port, basePort });
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ctx.errorResponse(res, 400, `No available ports found in range ${basePort}-${basePort + maxAttempts}`);
|
errorResponse(res, 400, `No available ports found in range ${basePort}-${basePort + maxAttempts}`);
|
||||||
}, 'suggest-port'));
|
}, 'suggest-port'));
|
||||||
|
|
||||||
// Update subdomain for deployed app
|
// Update subdomain for deployed app
|
||||||
router.post('/apps/update-subdomain', ctx.asyncHandler(async (req, res) => {
|
router.post('/apps/update-subdomain', asyncHandler(async (req, res) => {
|
||||||
const { serviceId, oldSubdomain, newSubdomain, containerId, ip } = req.body;
|
const { serviceId, oldSubdomain, newSubdomain, containerId, ip } = req.body;
|
||||||
if (!oldSubdomain || typeof oldSubdomain !== 'string') {
|
if (!oldSubdomain || typeof oldSubdomain !== 'string') {
|
||||||
throw new ValidationError('oldSubdomain is required');
|
throw new ValidationError('oldSubdomain is required');
|
||||||
@@ -64,7 +72,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
if (!REGEX.SUBDOMAIN.test(newSubdomain)) {
|
if (!REGEX.SUBDOMAIN.test(newSubdomain)) {
|
||||||
throw new ValidationError('[DC-301] Invalid subdomain format for newSubdomain');
|
throw new ValidationError('[DC-301] Invalid subdomain format for newSubdomain');
|
||||||
}
|
}
|
||||||
ctx.log.info('deploy', 'Updating subdomain', { oldSubdomain, newSubdomain });
|
log.info('deploy', 'Updating subdomain', { oldSubdomain, newSubdomain });
|
||||||
const results = { oldDns: null, newDns: null, caddy: null, service: null };
|
const results = { oldDns: null, newDns: null, caddy: null, service: null };
|
||||||
|
|
||||||
if (oldSubdomain && ctx.dns.getToken()) {
|
if (oldSubdomain && ctx.dns.getToken()) {
|
||||||
@@ -74,10 +82,10 @@ module.exports = function(ctx, helpers) {
|
|||||||
token: ctx.dns.getToken(), domain: oldDomain, type: 'A', ipAddress: ip || 'localhost'
|
token: ctx.dns.getToken(), domain: oldDomain, type: 'A', ipAddress: ip || 'localhost'
|
||||||
});
|
});
|
||||||
results.oldDns = result.status === 'ok' ? 'deleted' : result.errorMessage;
|
results.oldDns = result.status === 'ok' ? 'deleted' : result.errorMessage;
|
||||||
ctx.log.info('dns', 'Old DNS record deleted', { domain: oldDomain });
|
log.info('dns', 'Old DNS record deleted', { domain: oldDomain });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
results.oldDns = `failed: ${error.message}`;
|
results.oldDns = `failed: ${error.message}`;
|
||||||
ctx.log.warn('dns', 'Old DNS deletion warning', { error: error.message });
|
log.warn('dns', 'Old DNS deletion warning', { error: error.message });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -85,22 +93,22 @@ module.exports = function(ctx, helpers) {
|
|||||||
try {
|
try {
|
||||||
await ctx.dns.createRecord(newSubdomain, ip || 'localhost');
|
await ctx.dns.createRecord(newSubdomain, ip || 'localhost');
|
||||||
results.newDns = 'created';
|
results.newDns = 'created';
|
||||||
ctx.log.info('dns', 'New DNS record created', { domain: ctx.buildDomain(newSubdomain) });
|
log.info('dns', 'New DNS record created', { domain: ctx.buildDomain(newSubdomain) });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
results.newDns = `failed: ${error.message}`;
|
results.newDns = `failed: ${error.message}`;
|
||||||
ctx.log.warn('dns', 'New DNS creation warning', { error: error.message });
|
log.warn('dns', 'New DNS creation warning', { error: error.message });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
if (await exists(ctx.caddy.filePath)) {
|
if (await exists(caddy.filePath)) {
|
||||||
const oldDomain = oldSubdomain.includes('.') ? oldSubdomain : ctx.buildDomain(oldSubdomain);
|
const oldDomain = oldSubdomain.includes('.') ? oldSubdomain : ctx.buildDomain(oldSubdomain);
|
||||||
const newDomain = newSubdomain.includes('.') ? newSubdomain : ctx.buildDomain(newSubdomain);
|
const newDomain = newSubdomain.includes('.') ? newSubdomain : ctx.buildDomain(newSubdomain);
|
||||||
const escapedOld = oldDomain.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
const escapedOld = oldDomain.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||||
const oldBlockRegex = new RegExp(`${escapedOld}(?::\\d+)?\\s*\\{[^{}]*(?:\\{[^{}]*(?:\\{[^{}]*\\}[^{}]*)*\\}[^{}]*)*\\}`, 'g');
|
const oldBlockRegex = new RegExp(`${escapedOld}(?::\\d+)?\\s*\\{[^{}]*(?:\\{[^{}]*(?:\\{[^{}]*\\}[^{}]*)*\\}[^{}]*)*\\}`, 'g');
|
||||||
const content = await ctx.caddy.read();
|
const content = await caddy.read();
|
||||||
if (oldBlockRegex.test(content)) {
|
if (oldBlockRegex.test(content)) {
|
||||||
const caddyResult = await ctx.caddy.modify(c => {
|
const caddyResult = await caddy.modify(c => {
|
||||||
const re = new RegExp(`${escapedOld}(?::\\d+)?\\s*\\{[^{}]*(?:\\{[^{}]*(?:\\{[^{}]*\\}[^{}]*)*\\}[^{}]*)*\\}`, 'g');
|
const re = new RegExp(`${escapedOld}(?::\\d+)?\\s*\\{[^{}]*(?:\\{[^{}]*(?:\\{[^{}]*\\}[^{}]*)*\\}[^{}]*)*\\}`, 'g');
|
||||||
return c.replace(re, match => match.replace(oldDomain, newDomain));
|
return c.replace(re, match => match.replace(oldDomain, newDomain));
|
||||||
});
|
});
|
||||||
@@ -113,17 +121,17 @@ module.exports = function(ctx, helpers) {
|
|||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
results.caddy = `failed: ${error.message}`;
|
results.caddy = `failed: ${error.message}`;
|
||||||
ctx.log.error('caddy', 'Caddy update error', { error: error.message });
|
log.error('caddy', 'Caddy update error', { error: error.message });
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
if (await exists(ctx.SERVICES_FILE)) {
|
if (await exists(ctx.SERVICES_FILE)) {
|
||||||
await ctx.servicesStateManager.update(services => {
|
await servicesStateManager.update(services => {
|
||||||
const serviceIndex = services.findIndex(s => s.id === oldSubdomain || s.id === serviceId);
|
const serviceIndex = services.findIndex(s => s.id === oldSubdomain || s.id === serviceId);
|
||||||
if (serviceIndex !== -1) {
|
if (serviceIndex !== -1) {
|
||||||
services[serviceIndex].id = newSubdomain;
|
services[serviceIndex].id = newSubdomain;
|
||||||
results.service = 'updated';
|
results.service = 'updated';
|
||||||
ctx.log.info('deploy', 'Service config updated in services.json');
|
log.info('deploy', 'Service config updated in services.json');
|
||||||
} else {
|
} else {
|
||||||
results.service = 'not found';
|
results.service = 'not found';
|
||||||
}
|
}
|
||||||
@@ -132,7 +140,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
results.service = `failed: ${error.message}`;
|
results.service = `failed: ${error.message}`;
|
||||||
ctx.log.warn('deploy', 'Service update warning', { error: error.message || String(error) });
|
log.warn('deploy', 'Service update warning', { error: error.message || String(error) });
|
||||||
}
|
}
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
|
|||||||
@@ -1,13 +1,33 @@
|
|||||||
const express = require('express');
|
const express = require('express');
|
||||||
const { APP_PORTS, ARR_SERVICES } = require('../../constants');
|
const { APP_PORTS, ARR_SERVICES } = require('../../constants');
|
||||||
const { validateURL, validateToken } = require('../../input-validator');
|
const { validateURL, validateToken } = require('../../input-validator');
|
||||||
const { ValidationError, AuthenticationError, NotFoundError } = require('../errors');
|
const { ValidationError, AuthenticationError, NotFoundError } = require('../../errors');
|
||||||
|
const { logError } = require('../../src/utils/logging');
|
||||||
|
|
||||||
module.exports = function(ctx, helpers) {
|
/**
|
||||||
|
* Arr configuration routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.credentialManager - Credential manager
|
||||||
|
* @param {Object} deps.servicesStateManager - Services state manager
|
||||||
|
* @param {Object} deps.docker - Docker client wrapper
|
||||||
|
* @param {Function} deps.fetchT - Timeout-wrapped fetch
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Function} deps.errorResponse - Error response helper
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @param {Object} deps.helpers - Arr helpers module
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
module.exports = function({ credentialManager, servicesStateManager, docker, fetchT, asyncHandler, errorResponse, log, helpers, notification, safeErrorMessage }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
|
// Ctx shim for backward compatibility
|
||||||
|
const ctx = {
|
||||||
|
notification,
|
||||||
|
safeErrorMessage
|
||||||
|
};
|
||||||
|
|
||||||
// Auto-configure Overseerr with detected services
|
// Auto-configure Overseerr with detected services
|
||||||
router.post('/arr/configure-overseerr', ctx.asyncHandler(async (req, res) => {
|
router.post('/arr/configure-overseerr', asyncHandler(async (req, res) => {
|
||||||
const { radarr, sonarr } = req.body;
|
const { radarr, sonarr } = req.body;
|
||||||
const results = { radarr: null, sonarr: null };
|
const results = { radarr: null, sonarr: null };
|
||||||
|
|
||||||
@@ -16,17 +36,17 @@ module.exports = function(ctx, helpers) {
|
|||||||
const overseerrSession = await helpers.getOverseerrSession();
|
const overseerrSession = await helpers.getOverseerrSession();
|
||||||
|
|
||||||
if (!overseerrSession) {
|
if (!overseerrSession) {
|
||||||
return ctx.errorResponse(res, 502, 'Could not authenticate with Overseerr. Make sure Plex and Overseerr are running.', {
|
return errorResponse(res, 502, 'Could not authenticate with Overseerr. Make sure Plex and Overseerr are running.', {
|
||||||
hint: 'Complete Overseerr setup wizard and link your Plex account first, then try again.'
|
hint: 'Complete Overseerr setup wizard and link your Plex account first, then try again.'
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx.log.info('arr', 'Authenticated with Overseerr via Plex session');
|
log.info('arr', 'Authenticated with Overseerr via Plex session');
|
||||||
|
|
||||||
// Helper to make authenticated requests to Overseerr
|
// Helper to make authenticated requests to Overseerr
|
||||||
const overseerrFetch = async (endpoint, options = {}) => {
|
const overseerrFetch = async (endpoint, options = {}) => {
|
||||||
const url = `${overseerrUrl}${endpoint}`;
|
const url = `${overseerrUrl}${endpoint}`;
|
||||||
const response = await ctx.fetchT(url, {
|
const response = await fetchT(url, {
|
||||||
...options,
|
...options,
|
||||||
headers: {
|
headers: {
|
||||||
'Content-Type': 'application/json',
|
'Content-Type': 'application/json',
|
||||||
@@ -41,12 +61,12 @@ module.exports = function(ctx, helpers) {
|
|||||||
try {
|
try {
|
||||||
const statusRes = await overseerrFetch('/api/v1/status');
|
const statusRes = await overseerrFetch('/api/v1/status');
|
||||||
if (!statusRes.ok) {
|
if (!statusRes.ok) {
|
||||||
return ctx.errorResponse(res, 502, 'Cannot connect to Overseerr', {
|
return errorResponse(res, 502, 'Cannot connect to Overseerr', {
|
||||||
hint: 'Make sure Overseerr is running on port 5055'
|
hint: 'Make sure Overseerr is running on port 5055'
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
return ctx.errorResponse(res, 502, `Cannot reach Overseerr: ${e.message}`, {
|
return errorResponse(res, 502, `Cannot reach Overseerr: ${e.message}`, {
|
||||||
hint: 'Check if Overseerr container is running'
|
hint: 'Check if Overseerr container is running'
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -59,20 +79,20 @@ module.exports = function(ctx, helpers) {
|
|||||||
const radarrBaseUrl = radarr.url.replace(/\/+$/, '');
|
const radarrBaseUrl = radarr.url.replace(/\/+$/, '');
|
||||||
|
|
||||||
// Fetch quality profiles from Radarr
|
// Fetch quality profiles from Radarr
|
||||||
const profilesRes = await ctx.fetchT(`${radarrBaseUrl}/api/v3/qualityprofile`, {
|
const profilesRes = await fetchT(`${radarrBaseUrl}/api/v3/qualityprofile`, {
|
||||||
headers: { 'X-Api-Key': radarr.apiKey }
|
headers: { 'X-Api-Key': radarr.apiKey }
|
||||||
});
|
});
|
||||||
const profiles = profilesRes.ok ? await profilesRes.json() : [];
|
const profiles = profilesRes.ok ? await profilesRes.json() : [];
|
||||||
const defaultProfile = profiles[0] || { id: 1, name: 'Any' };
|
const defaultProfile = profiles[0] || { id: 1, name: 'Any' };
|
||||||
|
|
||||||
// Fetch root folders from Radarr
|
// Fetch root folders from Radarr
|
||||||
const rootFoldersRes = await ctx.fetchT(`${radarrBaseUrl}/api/v3/rootfolder`, {
|
const rootFoldersRes = await fetchT(`${radarrBaseUrl}/api/v3/rootfolder`, {
|
||||||
headers: { 'X-Api-Key': radarr.apiKey }
|
headers: { 'X-Api-Key': radarr.apiKey }
|
||||||
});
|
});
|
||||||
const rootFolders = rootFoldersRes.ok ? await rootFoldersRes.json() : [];
|
const rootFolders = rootFoldersRes.ok ? await rootFoldersRes.json() : [];
|
||||||
const defaultRootFolder = rootFolders[0]?.path || '/movies';
|
const defaultRootFolder = rootFolders[0]?.path || '/movies';
|
||||||
|
|
||||||
ctx.log.info('arr', 'Radarr configured', { profile: defaultProfile.name, profileId: defaultProfile.id, rootFolder: defaultRootFolder });
|
log.info('arr', 'Radarr configured', { profile: defaultProfile.name, profileId: defaultProfile.id, rootFolder: defaultRootFolder });
|
||||||
|
|
||||||
const radarrConfig = {
|
const radarrConfig = {
|
||||||
name: 'Radarr',
|
name: 'Radarr',
|
||||||
@@ -115,14 +135,14 @@ module.exports = function(ctx, helpers) {
|
|||||||
const sonarrBaseUrl = sonarr.url.replace(/\/+$/, '');
|
const sonarrBaseUrl = sonarr.url.replace(/\/+$/, '');
|
||||||
|
|
||||||
// Fetch quality profiles from Sonarr
|
// Fetch quality profiles from Sonarr
|
||||||
const profilesRes = await ctx.fetchT(`${sonarrBaseUrl}/api/v3/qualityprofile`, {
|
const profilesRes = await fetchT(`${sonarrBaseUrl}/api/v3/qualityprofile`, {
|
||||||
headers: { 'X-Api-Key': sonarr.apiKey }
|
headers: { 'X-Api-Key': sonarr.apiKey }
|
||||||
});
|
});
|
||||||
const profiles = profilesRes.ok ? await profilesRes.json() : [];
|
const profiles = profilesRes.ok ? await profilesRes.json() : [];
|
||||||
const defaultProfile = profiles[0] || { id: 1, name: 'Any' };
|
const defaultProfile = profiles[0] || { id: 1, name: 'Any' };
|
||||||
|
|
||||||
// Fetch root folders from Sonarr
|
// Fetch root folders from Sonarr
|
||||||
const rootFoldersRes = await ctx.fetchT(`${sonarrBaseUrl}/api/v3/rootfolder`, {
|
const rootFoldersRes = await fetchT(`${sonarrBaseUrl}/api/v3/rootfolder`, {
|
||||||
headers: { 'X-Api-Key': sonarr.apiKey }
|
headers: { 'X-Api-Key': sonarr.apiKey }
|
||||||
});
|
});
|
||||||
const rootFolders = rootFoldersRes.ok ? await rootFoldersRes.json() : [];
|
const rootFolders = rootFoldersRes.ok ? await rootFoldersRes.json() : [];
|
||||||
@@ -131,7 +151,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
// Fetch language profiles from Sonarr (v3 uses languageprofile, v4 doesn't need it)
|
// Fetch language profiles from Sonarr (v3 uses languageprofile, v4 doesn't need it)
|
||||||
let languageProfileId = 1;
|
let languageProfileId = 1;
|
||||||
try {
|
try {
|
||||||
const langRes = await ctx.fetchT(`${sonarrBaseUrl}/api/v3/languageprofile`, {
|
const langRes = await fetchT(`${sonarrBaseUrl}/api/v3/languageprofile`, {
|
||||||
headers: { 'X-Api-Key': sonarr.apiKey }
|
headers: { 'X-Api-Key': sonarr.apiKey }
|
||||||
});
|
});
|
||||||
if (langRes.ok) {
|
if (langRes.ok) {
|
||||||
@@ -142,7 +162,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
// Language profiles might not exist in Sonarr v4
|
// Language profiles might not exist in Sonarr v4
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx.log.info('arr', 'Sonarr configured', { profile: defaultProfile.name, profileId: defaultProfile.id, rootFolder: defaultRootFolder });
|
log.info('arr', 'Sonarr configured', { profile: defaultProfile.name, profileId: defaultProfile.id, rootFolder: defaultRootFolder });
|
||||||
|
|
||||||
const sonarrConfig = {
|
const sonarrConfig = {
|
||||||
name: 'Sonarr',
|
name: 'Sonarr',
|
||||||
@@ -188,7 +208,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
}, 'arr-configure-overseerr'));
|
}, 'arr-configure-overseerr'));
|
||||||
|
|
||||||
// Test connection to external Radarr/Sonarr service
|
// Test connection to external Radarr/Sonarr service
|
||||||
router.post('/arr/test-connection', ctx.asyncHandler(async (req, res) => {
|
router.post('/arr/test-connection', asyncHandler(async (req, res) => {
|
||||||
try {
|
try {
|
||||||
const { service, url, apiKey } = req.body;
|
const { service, url, apiKey } = req.body;
|
||||||
|
|
||||||
@@ -200,7 +220,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
try {
|
try {
|
||||||
validateURL(url);
|
validateURL(url);
|
||||||
} catch (validationErr) {
|
} catch (validationErr) {
|
||||||
return ctx.errorResponse(res, 400, validationErr.message);
|
return errorResponse(res, 400, validationErr.message);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate API key format
|
// Validate API key format
|
||||||
@@ -225,13 +245,13 @@ module.exports = function(ctx, helpers) {
|
|||||||
apiEndpoint = `${baseUrl}/identity`;
|
apiEndpoint = `${baseUrl}/identity`;
|
||||||
headers = { 'X-Plex-Token': apiKey, 'Accept': 'application/json' };
|
headers = { 'X-Plex-Token': apiKey, 'Accept': 'application/json' };
|
||||||
} else {
|
} else {
|
||||||
return ctx.errorResponse(res, 400, `Unknown service: ${service}`);
|
return errorResponse(res, 400, `Unknown service: ${service}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx.log.info('arr', 'Testing service connection', { service });
|
log.info('arr', 'Testing service connection', { service });
|
||||||
|
|
||||||
// Make the API call
|
// Make the API call
|
||||||
const response = await ctx.fetchT(apiEndpoint, {
|
const response = await fetchT(apiEndpoint, {
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
headers,
|
headers,
|
||||||
signal: AbortSignal.timeout(10000)
|
signal: AbortSignal.timeout(10000)
|
||||||
@@ -241,7 +261,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
const data = await response.json();
|
const data = await response.json();
|
||||||
const version = service === 'plex' ? data.MediaContainer?.version : data.version;
|
const version = service === 'plex' ? data.MediaContainer?.version : data.version;
|
||||||
const appName = service === 'plex' ? 'Plex' : data.appName;
|
const appName = service === 'plex' ? 'Plex' : data.appName;
|
||||||
ctx.log.info('arr', 'Service connection successful', { service, appName, version });
|
log.info('arr', 'Service connection successful', { service, appName, version });
|
||||||
return res.json({
|
return res.json({
|
||||||
success: true,
|
success: true,
|
||||||
version,
|
version,
|
||||||
@@ -252,25 +272,25 @@ module.exports = function(ctx, helpers) {
|
|||||||
} else if (response.status === 404) {
|
} else if (response.status === 404) {
|
||||||
throw new NotFoundError('API not found - check URL');
|
throw new NotFoundError('API not found - check URL');
|
||||||
} else {
|
} else {
|
||||||
return ctx.errorResponse(res, 502, `HTTP ${response.status}`);
|
return errorResponse(res, 502, `HTTP ${response.status}`);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
await ctx.logError('arr-test-connection', error);
|
await logError('arr-test-connection', error);
|
||||||
if (error.cause?.code === 'ECONNREFUSED') {
|
if (error.cause?.code === 'ECONNREFUSED') {
|
||||||
return ctx.errorResponse(res, 502, 'Connection refused');
|
return errorResponse(res, 502, 'Connection refused');
|
||||||
} else if (error.name === 'AbortError' || error.message?.includes('timeout')) {
|
} else if (error.name === 'AbortError' || error.message?.includes('timeout')) {
|
||||||
return ctx.errorResponse(res, 504, 'Connection timeout');
|
return errorResponse(res, 504, 'Connection timeout');
|
||||||
}
|
}
|
||||||
return ctx.errorResponse(res, 500, ctx.safeErrorMessage(error));
|
return errorResponse(res, 500, ctx.safeErrorMessage(error));
|
||||||
}
|
}
|
||||||
}, 'arr-test-connection'));
|
}, 'arr-test-connection'));
|
||||||
|
|
||||||
// Quick setup: Detect all services and configure Overseerr automatically
|
// Quick setup: Detect all services and configure Overseerr automatically
|
||||||
router.post('/arr/auto-setup', ctx.asyncHandler(async (req, res) => {
|
router.post('/arr/auto-setup', asyncHandler(async (req, res) => {
|
||||||
ctx.log.info('arr', 'Starting arr auto-setup');
|
log.info('arr', 'Starting arr auto-setup');
|
||||||
|
|
||||||
// Step 1: Detect all running arr services
|
// Step 1: Detect all running arr services
|
||||||
const containers = await ctx.docker.client.listContainers({ all: false });
|
const containers = await docker.client.listContainers({ all: false });
|
||||||
const detected = {};
|
const detected = {};
|
||||||
|
|
||||||
const servicePatterns = ARR_SERVICES;
|
const servicePatterns = ARR_SERVICES;
|
||||||
@@ -309,17 +329,17 @@ module.exports = function(ctx, helpers) {
|
|||||||
prowlarrFound: !!detected.prowlarr?.apiKey
|
prowlarrFound: !!detected.prowlarr?.apiKey
|
||||||
};
|
};
|
||||||
|
|
||||||
ctx.log.info('arr', 'Detected services', summary);
|
log.info('arr', 'Detected services', summary);
|
||||||
|
|
||||||
if (!summary.overseerrFound) {
|
if (!summary.overseerrFound) {
|
||||||
return ctx.errorResponse(res, 400, 'Overseerr is not running. Deploy it first.', {
|
return errorResponse(res, 400, 'Overseerr is not running. Deploy it first.', {
|
||||||
detected,
|
detected,
|
||||||
summary
|
summary
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!summary.radarrFound && !summary.sonarrFound) {
|
if (!summary.radarrFound && !summary.sonarrFound) {
|
||||||
return ctx.errorResponse(res, 400, 'No Radarr or Sonarr found with valid API keys. Deploy at least one first.', {
|
return errorResponse(res, 400, 'No Radarr or Sonarr found with valid API keys. Deploy at least one first.', {
|
||||||
detected,
|
detected,
|
||||||
summary
|
summary
|
||||||
});
|
});
|
||||||
@@ -329,18 +349,18 @@ module.exports = function(ctx, helpers) {
|
|||||||
const overseerrSession = await helpers.getOverseerrSession();
|
const overseerrSession = await helpers.getOverseerrSession();
|
||||||
|
|
||||||
if (!overseerrSession) {
|
if (!overseerrSession) {
|
||||||
return ctx.errorResponse(res, 502, 'Could not authenticate with Overseerr. Make sure Plex and Overseerr are running.', {
|
return errorResponse(res, 502, 'Could not authenticate with Overseerr. Make sure Plex and Overseerr are running.', {
|
||||||
setupUrl: detected.overseerr.localUrl,
|
setupUrl: detected.overseerr.localUrl,
|
||||||
detected,
|
detected,
|
||||||
summary
|
summary
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx.log.info('arr', 'Authenticated with Overseerr via Plex session');
|
log.info('arr', 'Authenticated with Overseerr via Plex session');
|
||||||
|
|
||||||
// Helper for authenticated Overseerr requests
|
// Helper for authenticated Overseerr requests
|
||||||
const overseerrFetch = async (endpoint, options = {}) => {
|
const overseerrFetch = async (endpoint, options = {}) => {
|
||||||
return ctx.fetchT(`${detected.overseerr.url}${endpoint}`, {
|
return fetchT(`${detected.overseerr.url}${endpoint}`, {
|
||||||
...options,
|
...options,
|
||||||
headers: {
|
headers: {
|
||||||
'Content-Type': 'application/json',
|
'Content-Type': 'application/json',
|
||||||
@@ -356,20 +376,20 @@ module.exports = function(ctx, helpers) {
|
|||||||
if (detected.radarr?.apiKey) {
|
if (detected.radarr?.apiKey) {
|
||||||
try {
|
try {
|
||||||
// Fetch quality profiles from Radarr
|
// Fetch quality profiles from Radarr
|
||||||
const profilesRes = await ctx.fetchT(`${detected.radarr.localUrl}/api/v3/qualityprofile`, {
|
const profilesRes = await fetchT(`${detected.radarr.localUrl}/api/v3/qualityprofile`, {
|
||||||
headers: { 'X-Api-Key': detected.radarr.apiKey }
|
headers: { 'X-Api-Key': detected.radarr.apiKey }
|
||||||
});
|
});
|
||||||
const profiles = profilesRes.ok ? await profilesRes.json() : [];
|
const profiles = profilesRes.ok ? await profilesRes.json() : [];
|
||||||
const defaultProfile = profiles[0] || { id: 1, name: 'Any' };
|
const defaultProfile = profiles[0] || { id: 1, name: 'Any' };
|
||||||
|
|
||||||
// Fetch root folders from Radarr
|
// Fetch root folders from Radarr
|
||||||
const rootFoldersRes = await ctx.fetchT(`${detected.radarr.localUrl}/api/v3/rootfolder`, {
|
const rootFoldersRes = await fetchT(`${detected.radarr.localUrl}/api/v3/rootfolder`, {
|
||||||
headers: { 'X-Api-Key': detected.radarr.apiKey }
|
headers: { 'X-Api-Key': detected.radarr.apiKey }
|
||||||
});
|
});
|
||||||
const rootFolders = rootFoldersRes.ok ? await rootFoldersRes.json() : [];
|
const rootFolders = rootFoldersRes.ok ? await rootFoldersRes.json() : [];
|
||||||
const defaultRootFolder = rootFolders[0]?.path || '/movies';
|
const defaultRootFolder = rootFolders[0]?.path || '/movies';
|
||||||
|
|
||||||
ctx.log.info('arr', 'Radarr profile selected', { profile: defaultProfile.name, rootFolder: defaultRootFolder });
|
log.info('arr', 'Radarr profile selected', { profile: defaultProfile.name, rootFolder: defaultRootFolder });
|
||||||
|
|
||||||
const radarrConfig = {
|
const radarrConfig = {
|
||||||
name: 'Radarr',
|
name: 'Radarr',
|
||||||
@@ -403,14 +423,14 @@ module.exports = function(ctx, helpers) {
|
|||||||
if (detected.sonarr?.apiKey) {
|
if (detected.sonarr?.apiKey) {
|
||||||
try {
|
try {
|
||||||
// Fetch quality profiles from Sonarr
|
// Fetch quality profiles from Sonarr
|
||||||
const profilesRes = await ctx.fetchT(`${detected.sonarr.localUrl}/api/v3/qualityprofile`, {
|
const profilesRes = await fetchT(`${detected.sonarr.localUrl}/api/v3/qualityprofile`, {
|
||||||
headers: { 'X-Api-Key': detected.sonarr.apiKey }
|
headers: { 'X-Api-Key': detected.sonarr.apiKey }
|
||||||
});
|
});
|
||||||
const profiles = profilesRes.ok ? await profilesRes.json() : [];
|
const profiles = profilesRes.ok ? await profilesRes.json() : [];
|
||||||
const defaultProfile = profiles[0] || { id: 1, name: 'Any' };
|
const defaultProfile = profiles[0] || { id: 1, name: 'Any' };
|
||||||
|
|
||||||
// Fetch root folders from Sonarr
|
// Fetch root folders from Sonarr
|
||||||
const rootFoldersRes = await ctx.fetchT(`${detected.sonarr.localUrl}/api/v3/rootfolder`, {
|
const rootFoldersRes = await fetchT(`${detected.sonarr.localUrl}/api/v3/rootfolder`, {
|
||||||
headers: { 'X-Api-Key': detected.sonarr.apiKey }
|
headers: { 'X-Api-Key': detected.sonarr.apiKey }
|
||||||
});
|
});
|
||||||
const rootFolders = rootFoldersRes.ok ? await rootFoldersRes.json() : [];
|
const rootFolders = rootFoldersRes.ok ? await rootFoldersRes.json() : [];
|
||||||
@@ -419,7 +439,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
// Fetch language profiles (Sonarr v3)
|
// Fetch language profiles (Sonarr v3)
|
||||||
let languageProfileId = 1;
|
let languageProfileId = 1;
|
||||||
try {
|
try {
|
||||||
const langRes = await ctx.fetchT(`${detected.sonarr.localUrl}/api/v3/languageprofile`, {
|
const langRes = await fetchT(`${detected.sonarr.localUrl}/api/v3/languageprofile`, {
|
||||||
headers: { 'X-Api-Key': detected.sonarr.apiKey }
|
headers: { 'X-Api-Key': detected.sonarr.apiKey }
|
||||||
});
|
});
|
||||||
if (langRes.ok) {
|
if (langRes.ok) {
|
||||||
@@ -428,7 +448,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
}
|
}
|
||||||
} catch (e) { /* Sonarr v4 doesn't need this */ }
|
} catch (e) { /* Sonarr v4 doesn't need this */ }
|
||||||
|
|
||||||
ctx.log.info('arr', 'Sonarr profile selected', { profile: defaultProfile.name, rootFolder: defaultRootFolder });
|
log.info('arr', 'Sonarr profile selected', { profile: defaultProfile.name, rootFolder: defaultRootFolder });
|
||||||
|
|
||||||
const sonarrConfig = {
|
const sonarrConfig = {
|
||||||
name: 'Sonarr',
|
name: 'Sonarr',
|
||||||
@@ -481,7 +501,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
}, 'arr-auto-setup'));
|
}, 'arr-auto-setup'));
|
||||||
|
|
||||||
// Fetch quality profiles from an arr service (Radarr/Sonarr)
|
// Fetch quality profiles from an arr service (Radarr/Sonarr)
|
||||||
router.get('/arr/quality-profiles', ctx.asyncHandler(async (req, res) => {
|
router.get('/arr/quality-profiles', asyncHandler(async (req, res) => {
|
||||||
const { service, url, apiKey } = req.query;
|
const { service, url, apiKey } = req.query;
|
||||||
|
|
||||||
if (!service || !['radarr', 'sonarr'].includes(service)) {
|
if (!service || !['radarr', 'sonarr'].includes(service)) {
|
||||||
@@ -493,19 +513,19 @@ module.exports = function(ctx, helpers) {
|
|||||||
let resolvedUrl = url;
|
let resolvedUrl = url;
|
||||||
|
|
||||||
if (!resolvedKey) {
|
if (!resolvedKey) {
|
||||||
resolvedKey = await ctx.credentialManager.retrieve(`arr.${service}.apikey`);
|
resolvedKey = await credentialManager.retrieve(`arr.${service}.apikey`);
|
||||||
}
|
}
|
||||||
if (!resolvedKey) {
|
if (!resolvedKey) {
|
||||||
resolvedKey = await ctx.credentialManager.retrieve(`service.${service}.apikey`);
|
resolvedKey = await credentialManager.retrieve(`service.${service}.apikey`);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!resolvedUrl) {
|
if (!resolvedUrl) {
|
||||||
const metadata = await ctx.credentialManager.getMetadata(`arr.${service}.apikey`);
|
const metadata = await credentialManager.getMetadata(`arr.${service}.apikey`);
|
||||||
resolvedUrl = metadata?.url;
|
resolvedUrl = metadata?.url;
|
||||||
}
|
}
|
||||||
if (!resolvedUrl) {
|
if (!resolvedUrl) {
|
||||||
try {
|
try {
|
||||||
const services = await ctx.servicesStateManager.read();
|
const services = await servicesStateManager.read();
|
||||||
const svcList = Array.isArray(services) ? services : services.services || [];
|
const svcList = Array.isArray(services) ? services : services.services || [];
|
||||||
const found = svcList.find(s => s.id === service);
|
const found = svcList.find(s => s.id === service);
|
||||||
if (found?.externalUrl) resolvedUrl = found.externalUrl;
|
if (found?.externalUrl) resolvedUrl = found.externalUrl;
|
||||||
@@ -520,13 +540,13 @@ module.exports = function(ctx, helpers) {
|
|||||||
const baseUrl = resolvedUrl.replace(/\/+$/, '');
|
const baseUrl = resolvedUrl.replace(/\/+$/, '');
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const profilesRes = await ctx.fetchT(`${baseUrl}/api/v3/qualityprofile`, {
|
const profilesRes = await fetchT(`${baseUrl}/api/v3/qualityprofile`, {
|
||||||
headers: { 'X-Api-Key': resolvedKey, 'Accept': 'application/json' },
|
headers: { 'X-Api-Key': resolvedKey, 'Accept': 'application/json' },
|
||||||
signal: AbortSignal.timeout(10000)
|
signal: AbortSignal.timeout(10000)
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!profilesRes.ok) {
|
if (!profilesRes.ok) {
|
||||||
return ctx.errorResponse(res, profilesRes.status === 401 ? 401 : 502,
|
return errorResponse(res, profilesRes.status === 401 ? 401 : 502,
|
||||||
profilesRes.status === 401 ? 'Invalid API key' : `Failed to fetch profiles (HTTP ${profilesRes.status})`);
|
profilesRes.status === 401 ? 'Invalid API key' : `Failed to fetch profiles (HTTP ${profilesRes.status})`);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -534,23 +554,23 @@ module.exports = function(ctx, helpers) {
|
|||||||
const mapped = profiles.map(p => ({ id: p.id, name: p.name }));
|
const mapped = profiles.map(p => ({ id: p.id, name: p.name }));
|
||||||
|
|
||||||
// Load stored profile preference
|
// Load stored profile preference
|
||||||
const metadata = await ctx.credentialManager.getMetadata(`arr.${service}.apikey`);
|
const metadata = await credentialManager.getMetadata(`arr.${service}.apikey`);
|
||||||
const storedProfileId = metadata?.qualityProfileId || null;
|
const storedProfileId = metadata?.qualityProfileId || null;
|
||||||
|
|
||||||
res.json({ success: true, profiles: mapped, storedProfileId });
|
res.json({ success: true, profiles: mapped, storedProfileId });
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
if (e.cause?.code === 'ECONNREFUSED') {
|
if (e.cause?.code === 'ECONNREFUSED') {
|
||||||
return ctx.errorResponse(res, 502, 'Connection refused — is the service running?');
|
return errorResponse(res, 502, 'Connection refused — is the service running?');
|
||||||
}
|
}
|
||||||
if (e.name === 'AbortError') {
|
if (e.name === 'AbortError') {
|
||||||
return ctx.errorResponse(res, 504, 'Connection timeout');
|
return errorResponse(res, 504, 'Connection timeout');
|
||||||
}
|
}
|
||||||
return ctx.errorResponse(res, 500, e.message);
|
return errorResponse(res, 500, e.message);
|
||||||
}
|
}
|
||||||
}, 'arr-quality-profiles'));
|
}, 'arr-quality-profiles'));
|
||||||
|
|
||||||
// Save quality profile preference (without re-storing API key)
|
// Save quality profile preference (without re-storing API key)
|
||||||
router.post('/arr/quality-profiles', ctx.asyncHandler(async (req, res) => {
|
router.post('/arr/quality-profiles', asyncHandler(async (req, res) => {
|
||||||
const { service, qualityProfileId, qualityProfileName } = req.body;
|
const { service, qualityProfileId, qualityProfileName } = req.body;
|
||||||
|
|
||||||
if (!service || !['radarr', 'sonarr'].includes(service)) {
|
if (!service || !['radarr', 'sonarr'].includes(service)) {
|
||||||
@@ -561,7 +581,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const credKey = `arr.${service}.apikey`;
|
const credKey = `arr.${service}.apikey`;
|
||||||
const existing = await ctx.credentialManager.getMetadata(credKey);
|
const existing = await credentialManager.getMetadata(credKey);
|
||||||
|
|
||||||
if (!existing) {
|
if (!existing) {
|
||||||
throw new NotFoundError('No stored credentials for this service');
|
throw new NotFoundError('No stored credentials for this service');
|
||||||
@@ -570,7 +590,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
// Merge quality profile into existing metadata
|
// Merge quality profile into existing metadata
|
||||||
existing.qualityProfileId = qualityProfileId;
|
existing.qualityProfileId = qualityProfileId;
|
||||||
existing.qualityProfileName = qualityProfileName || null;
|
existing.qualityProfileName = qualityProfileName || null;
|
||||||
await ctx.credentialManager.storeMetadata(credKey, existing);
|
await credentialManager.storeMetadata(credKey, existing);
|
||||||
|
|
||||||
res.json({ success: true, message: `Quality profile updated for ${service}` });
|
res.json({ success: true, message: `Quality profile updated for ${service}` });
|
||||||
}, 'arr-quality-profile-save'));
|
}, 'arr-quality-profile-save'));
|
||||||
|
|||||||
@@ -1,12 +1,23 @@
|
|||||||
const express = require('express');
|
const express = require('express');
|
||||||
const { validateURL, validateToken } = require('../../input-validator');
|
const { validateURL, validateToken } = require('../../input-validator');
|
||||||
const { ValidationError } = require('../errors');
|
const { ValidationError } = require('../../errors');
|
||||||
|
|
||||||
module.exports = function(ctx, helpers) {
|
/**
|
||||||
|
* Arr credentials routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.credentialManager - Credential manager
|
||||||
|
* @param {Object} deps.servicesStateManager - Services state manager
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Function} deps.errorResponse - Error response helper
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @param {Object} deps.helpers - Arr helpers module
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
module.exports = function({ credentialManager, servicesStateManager, asyncHandler, errorResponse, log, helpers }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
// Store arr service credentials
|
// Store arr service credentials
|
||||||
router.post('/arr/credentials', ctx.asyncHandler(async (req, res) => {
|
router.post('/arr/credentials', asyncHandler(async (req, res) => {
|
||||||
const { service, apiKey, url, seedboxBaseUrl, qualityProfileId, qualityProfileName } = req.body;
|
const { service, apiKey, url, seedboxBaseUrl, qualityProfileId, qualityProfileName } = req.body;
|
||||||
|
|
||||||
if (!service || !apiKey) {
|
if (!service || !apiKey) {
|
||||||
@@ -15,7 +26,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
|
|
||||||
const validServices = ['radarr', 'sonarr', 'prowlarr', 'lidarr', 'plex'];
|
const validServices = ['radarr', 'sonarr', 'prowlarr', 'lidarr', 'plex'];
|
||||||
if (!validServices.includes(service)) {
|
if (!validServices.includes(service)) {
|
||||||
return ctx.errorResponse(res, 400, `Invalid service. Must be one of: ${validServices.join(', ')}`);
|
return errorResponse(res, 400, `Invalid service. Must be one of: ${validServices.join(', ')}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate API key format
|
// Validate API key format
|
||||||
@@ -50,7 +61,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
if (!resolvedUrl) {
|
if (!resolvedUrl) {
|
||||||
// Try to resolve URL from services.json
|
// Try to resolve URL from services.json
|
||||||
try {
|
try {
|
||||||
const services = await ctx.servicesStateManager.read();
|
const services = await servicesStateManager.read();
|
||||||
const svc = Array.isArray(services) ? services : services.services || [];
|
const svc = Array.isArray(services) ? services : services.services || [];
|
||||||
const found = svc.find(s => s.id === service && s.isExternal);
|
const found = svc.find(s => s.id === service && s.isExternal);
|
||||||
if (found?.externalUrl) resolvedUrl = found.externalUrl;
|
if (found?.externalUrl) resolvedUrl = found.externalUrl;
|
||||||
@@ -73,9 +84,9 @@ module.exports = function(ctx, helpers) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Store the credential
|
// Store the credential
|
||||||
const stored = await ctx.credentialManager.store(credKey, apiKey, metadata);
|
const stored = await credentialManager.store(credKey, apiKey, metadata);
|
||||||
if (!stored) {
|
if (!stored) {
|
||||||
return ctx.errorResponse(res, 500, 'Failed to store credential');
|
return errorResponse(res, 500, 'Failed to store credential');
|
||||||
}
|
}
|
||||||
|
|
||||||
// Optionally store seedbox base URL
|
// Optionally store seedbox base URL
|
||||||
@@ -83,12 +94,12 @@ module.exports = function(ctx, helpers) {
|
|||||||
try { validateURL(seedboxBaseUrl); } catch (e) {
|
try { validateURL(seedboxBaseUrl); } catch (e) {
|
||||||
throw new ValidationError('Invalid seedbox base URL');
|
throw new ValidationError('Invalid seedbox base URL');
|
||||||
}
|
}
|
||||||
await ctx.credentialManager.store('arr.seedbox.baseurl', seedboxBaseUrl, {
|
await credentialManager.store('arr.seedbox.baseurl', seedboxBaseUrl, {
|
||||||
storedAt: new Date().toISOString()
|
storedAt: new Date().toISOString()
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx.log.info('arr', 'Stored API key', { service, verified: connectionTest?.success || false });
|
log.info('arr', 'Stored API key', { service, verified: connectionTest?.success || false });
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@@ -99,14 +110,14 @@ module.exports = function(ctx, helpers) {
|
|||||||
}, 'arr-credentials-store'));
|
}, 'arr-credentials-store'));
|
||||||
|
|
||||||
// List stored arr credentials (keys only, not values)
|
// List stored arr credentials (keys only, not values)
|
||||||
router.get('/arr/credentials', ctx.asyncHandler(async (req, res) => {
|
router.get('/arr/credentials', asyncHandler(async (req, res) => {
|
||||||
const services = ['radarr', 'sonarr', 'prowlarr', 'lidarr', 'plex'];
|
const services = ['radarr', 'sonarr', 'prowlarr', 'lidarr', 'plex'];
|
||||||
const credentials = {};
|
const credentials = {};
|
||||||
|
|
||||||
for (const service of services) {
|
for (const service of services) {
|
||||||
const credKey = service === 'plex' ? 'arr.plex.token' : `arr.${service}.apikey`;
|
const credKey = service === 'plex' ? 'arr.plex.token' : `arr.${service}.apikey`;
|
||||||
const hasKey = !!(await ctx.credentialManager.retrieve(credKey));
|
const hasKey = !!(await credentialManager.retrieve(credKey));
|
||||||
const metadata = await ctx.credentialManager.getMetadata(credKey);
|
const metadata = await credentialManager.getMetadata(credKey);
|
||||||
|
|
||||||
credentials[service] = {
|
credentials[service] = {
|
||||||
hasKey,
|
hasKey,
|
||||||
@@ -118,17 +129,17 @@ module.exports = function(ctx, helpers) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Get seedbox base URL
|
// Get seedbox base URL
|
||||||
const seedboxBaseUrl = await ctx.credentialManager.retrieve('arr.seedbox.baseurl');
|
const seedboxBaseUrl = await credentialManager.retrieve('arr.seedbox.baseurl');
|
||||||
|
|
||||||
res.json({ success: true, credentials, seedboxBaseUrl: seedboxBaseUrl || null });
|
res.json({ success: true, credentials, seedboxBaseUrl: seedboxBaseUrl || null });
|
||||||
}, 'arr-credentials-list'));
|
}, 'arr-credentials-list'));
|
||||||
|
|
||||||
// Delete stored arr credentials
|
// Delete stored arr credentials
|
||||||
router.delete('/arr/credentials/:service', ctx.asyncHandler(async (req, res) => {
|
router.delete('/arr/credentials/:service', asyncHandler(async (req, res) => {
|
||||||
const { service } = req.params;
|
const { service } = req.params;
|
||||||
const credKey = service === 'plex' ? 'arr.plex.token' : `arr.${service}.apikey`;
|
const credKey = service === 'plex' ? 'arr.plex.token' : `arr.${service}.apikey`;
|
||||||
await ctx.credentialManager.delete(credKey);
|
await credentialManager.delete(credKey);
|
||||||
ctx.log.info('arr', 'Deleted credentials', { service });
|
log.info('arr', 'Deleted credentials', { service });
|
||||||
res.json({ success: true, message: `${service} credentials removed` });
|
res.json({ success: true, message: `${service} credentials removed` });
|
||||||
}, 'arr-credentials-delete'));
|
}, 'arr-credentials-delete'));
|
||||||
|
|
||||||
|
|||||||
@@ -1,12 +1,23 @@
|
|||||||
const express = require('express');
|
const express = require('express');
|
||||||
const { APP_PORTS, ARR_SERVICES } = require('../../constants');
|
const { APP_PORTS, ARR_SERVICES } = require('../../constants');
|
||||||
|
|
||||||
module.exports = function(ctx, helpers) {
|
/**
|
||||||
|
* Arr service detection routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.docker - Docker client wrapper
|
||||||
|
* @param {Object} deps.servicesStateManager - Services state manager
|
||||||
|
* @param {Object} deps.credentialManager - Credential manager
|
||||||
|
* @param {Function} deps.fetchT - Timeout-wrapped fetch
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Object} deps.helpers - Arr helpers module
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
module.exports = function({ docker, servicesStateManager, credentialManager, fetchT, asyncHandler, helpers }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
// Detect running arr services and their configurations
|
// Detect running arr services and their configurations
|
||||||
router.get('/arr/detect', ctx.asyncHandler(async (req, res) => {
|
router.get('/arr/detect', asyncHandler(async (req, res) => {
|
||||||
const containers = await ctx.docker.client.listContainers({ all: false });
|
const containers = await docker.client.listContainers({ all: false });
|
||||||
const detected = {
|
const detected = {
|
||||||
plex: null,
|
plex: null,
|
||||||
radarr: null,
|
radarr: null,
|
||||||
@@ -64,14 +75,14 @@ module.exports = function(ctx, helpers) {
|
|||||||
}, 'arr-detect'));
|
}, 'arr-detect'));
|
||||||
|
|
||||||
// Smart Detect: Unified discovery of all arr services
|
// Smart Detect: Unified discovery of all arr services
|
||||||
router.get('/arr/smart-detect', ctx.asyncHandler(async (req, res) => {
|
router.get('/arr/smart-detect', asyncHandler(async (req, res) => {
|
||||||
const serviceList = ['plex', 'radarr', 'sonarr', 'prowlarr', 'seerr'];
|
const serviceList = ['plex', 'radarr', 'sonarr', 'prowlarr', 'seerr'];
|
||||||
const defaultPorts = APP_PORTS;
|
const defaultPorts = APP_PORTS;
|
||||||
const result = {};
|
const result = {};
|
||||||
|
|
||||||
// 1. Scan Docker containers
|
// 1. Scan Docker containers
|
||||||
let containers = [];
|
let containers = [];
|
||||||
try { containers = await ctx.docker.client.listContainers({ all: false }); } catch (e) { /* Docker not available */ }
|
try { containers = await docker.client.listContainers({ all: false }); } catch (e) { /* Docker not available */ }
|
||||||
|
|
||||||
const servicePatterns = ARR_SERVICES;
|
const servicePatterns = ARR_SERVICES;
|
||||||
|
|
||||||
@@ -95,18 +106,18 @@ module.exports = function(ctx, helpers) {
|
|||||||
// 2. Load services.json for external entries
|
// 2. Load services.json for external entries
|
||||||
let storedServices = [];
|
let storedServices = [];
|
||||||
try {
|
try {
|
||||||
const data = await ctx.servicesStateManager.read();
|
const data = await servicesStateManager.read();
|
||||||
storedServices = Array.isArray(data) ? data : data.services || [];
|
storedServices = Array.isArray(data) ? data : data.services || [];
|
||||||
} catch (e) { /* ignore */ }
|
} catch (e) { /* ignore */ }
|
||||||
|
|
||||||
// 3. Load stored credentials
|
// 3. Load stored credentials
|
||||||
const storedCreds = {};
|
const storedCreds = {};
|
||||||
const seedboxBaseUrl = await ctx.credentialManager.retrieve('arr.seedbox.baseurl');
|
const seedboxBaseUrl = await credentialManager.retrieve('arr.seedbox.baseurl');
|
||||||
|
|
||||||
for (const svc of serviceList) {
|
for (const svc of serviceList) {
|
||||||
const credKey = svc === 'plex' ? 'arr.plex.token' : `arr.${svc}.apikey`;
|
const credKey = svc === 'plex' ? 'arr.plex.token' : `arr.${svc}.apikey`;
|
||||||
const apiKey = await ctx.credentialManager.retrieve(credKey);
|
const apiKey = await credentialManager.retrieve(credKey);
|
||||||
const metadata = await ctx.credentialManager.getMetadata(credKey);
|
const metadata = await credentialManager.getMetadata(credKey);
|
||||||
if (apiKey) {
|
if (apiKey) {
|
||||||
storedCreds[svc] = { apiKey, metadata };
|
storedCreds[svc] = { apiKey, metadata };
|
||||||
}
|
}
|
||||||
@@ -141,7 +152,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
entry.hasToken = true;
|
entry.hasToken = true;
|
||||||
entry.status = 'connected';
|
entry.status = 'connected';
|
||||||
// Store for later use
|
// Store for later use
|
||||||
await ctx.credentialManager.store('arr.plex.token', token, {
|
await credentialManager.store('arr.plex.token', token, {
|
||||||
service: 'plex', source: 'local', url: entry.url,
|
service: 'plex', source: 'local', url: entry.url,
|
||||||
lastVerified: new Date().toISOString()
|
lastVerified: new Date().toISOString()
|
||||||
});
|
});
|
||||||
@@ -158,7 +169,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
entry.hasApiKey = true;
|
entry.hasApiKey = true;
|
||||||
const configuredServices = { radarr: false, sonarr: false, plex: false };
|
const configuredServices = { radarr: false, sonarr: false, plex: false };
|
||||||
try {
|
try {
|
||||||
const radarrCheck = await ctx.fetchT(`http://host.docker.internal:${dc.port}/api/v1/settings/radarr`, {
|
const radarrCheck = await fetchT(`http://host.docker.internal:${dc.port}/api/v1/settings/radarr`, {
|
||||||
headers: { 'Cookie': session.cookie },
|
headers: { 'Cookie': session.cookie },
|
||||||
signal: AbortSignal.timeout(5000)
|
signal: AbortSignal.timeout(5000)
|
||||||
});
|
});
|
||||||
@@ -168,7 +179,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
}
|
}
|
||||||
} catch (e) { /* ignore */ }
|
} catch (e) { /* ignore */ }
|
||||||
try {
|
try {
|
||||||
const sonarrCheck = await ctx.fetchT(`http://host.docker.internal:${dc.port}/api/v1/settings/sonarr`, {
|
const sonarrCheck = await fetchT(`http://host.docker.internal:${dc.port}/api/v1/settings/sonarr`, {
|
||||||
headers: { 'Cookie': session.cookie },
|
headers: { 'Cookie': session.cookie },
|
||||||
signal: AbortSignal.timeout(5000)
|
signal: AbortSignal.timeout(5000)
|
||||||
});
|
});
|
||||||
@@ -178,7 +189,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
}
|
}
|
||||||
} catch (e) { /* ignore */ }
|
} catch (e) { /* ignore */ }
|
||||||
try {
|
try {
|
||||||
const plexCheck = await ctx.fetchT(`http://host.docker.internal:${dc.port}/api/v1/settings/plex`, {
|
const plexCheck = await fetchT(`http://host.docker.internal:${dc.port}/api/v1/settings/plex`, {
|
||||||
headers: { 'Cookie': session.cookie },
|
headers: { 'Cookie': session.cookie },
|
||||||
signal: AbortSignal.timeout(5000)
|
signal: AbortSignal.timeout(5000)
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,14 +1,23 @@
|
|||||||
const { APP_PORTS } = require('../../constants');
|
const { APP_PORTS } = require('../../constants');
|
||||||
|
|
||||||
module.exports = function(ctx) {
|
/**
|
||||||
|
* Arr helpers factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.docker - Docker client wrapper
|
||||||
|
* @param {Object} deps.credentialManager - Credential manager
|
||||||
|
* @param {Function} deps.fetchT - Timeout-wrapped fetch
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @returns {Object} Helper functions
|
||||||
|
*/
|
||||||
|
module.exports = function({ docker, credentialManager, fetchT, log }) {
|
||||||
|
|
||||||
// Helper: Extract API key from arr service config.xml
|
// Helper: Extract API key from arr service config.xml
|
||||||
async function getArrApiKey(containerName) {
|
async function getArrApiKey(containerName) {
|
||||||
try {
|
try {
|
||||||
const container = await ctx.docker.findContainer(containerName);
|
const container = await docker.findContainer(containerName);
|
||||||
if (!container) return null;
|
if (!container) return null;
|
||||||
|
|
||||||
const dockerContainer = ctx.docker.client.getContainer(container.Id);
|
const dockerContainer = docker.client.getContainer(container.Id);
|
||||||
const exec = await dockerContainer.exec({
|
const exec = await dockerContainer.exec({
|
||||||
Cmd: ['cat', '/config/config.xml'],
|
Cmd: ['cat', '/config/config.xml'],
|
||||||
AttachStdout: true,
|
AttachStdout: true,
|
||||||
@@ -28,7 +37,7 @@ module.exports = function(ctx) {
|
|||||||
stream.on('error', () => resolve(null));
|
stream.on('error', () => resolve(null));
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
ctx.log.error('docker', 'Failed to get API key', { containerName, error: error.message });
|
log.error('docker', 'Failed to get API key', { containerName, error: error.message });
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -36,14 +45,14 @@ module.exports = function(ctx) {
|
|||||||
// Helper: Get Plex token from container or config
|
// Helper: Get Plex token from container or config
|
||||||
async function getPlexToken(containerName) {
|
async function getPlexToken(containerName) {
|
||||||
try {
|
try {
|
||||||
const containers = await ctx.docker.client.listContainers({ all: false });
|
const containers = await docker.client.listContainers({ all: false });
|
||||||
const container = containers.find(c =>
|
const container = containers.find(c =>
|
||||||
c.Names.some(n => n.toLowerCase().includes(containerName.toLowerCase()) || n.toLowerCase().includes('plex'))
|
c.Names.some(n => n.toLowerCase().includes(containerName.toLowerCase()) || n.toLowerCase().includes('plex'))
|
||||||
);
|
);
|
||||||
|
|
||||||
if (!container) return null;
|
if (!container) return null;
|
||||||
|
|
||||||
const dockerContainer = ctx.docker.client.getContainer(container.Id);
|
const dockerContainer = docker.client.getContainer(container.Id);
|
||||||
const exec = await dockerContainer.exec({
|
const exec = await dockerContainer.exec({
|
||||||
Cmd: ['cat', '/config/Library/Application Support/Plex Media Server/Preferences.xml'],
|
Cmd: ['cat', '/config/Library/Application Support/Plex Media Server/Preferences.xml'],
|
||||||
AttachStdout: true,
|
AttachStdout: true,
|
||||||
@@ -62,7 +71,7 @@ module.exports = function(ctx) {
|
|||||||
stream.on('error', () => resolve(null));
|
stream.on('error', () => resolve(null));
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
ctx.log.error('docker', 'Failed to get Plex token', { error: error.message });
|
log.error('docker', 'Failed to get Plex token', { error: error.message });
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -84,16 +93,16 @@ module.exports = function(ctx) {
|
|||||||
|
|
||||||
// Fall back to stored Plex token in credential manager
|
// Fall back to stored Plex token in credential manager
|
||||||
if (!plexToken) {
|
if (!plexToken) {
|
||||||
plexToken = await ctx.credentialManager.retrieve('arr.plex.token');
|
plexToken = await credentialManager.retrieve('arr.plex.token');
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!plexToken) {
|
if (!plexToken) {
|
||||||
ctx.log.error('arr', 'Could not get Plex token for Seerr auth (no container, no stored token)');
|
log.error('arr', 'Could not get Plex token for Seerr auth (no container, no stored token)');
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Authenticate with Seerr via Plex token
|
// Authenticate with Seerr via Plex token
|
||||||
const authRes = await ctx.fetchT(`${seerrUrl}/api/v1/auth/plex`, {
|
const authRes = await fetchT(`${seerrUrl}/api/v1/auth/plex`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'Content-Type': 'application/json' },
|
headers: { 'Content-Type': 'application/json' },
|
||||||
body: JSON.stringify({ authToken: plexToken }),
|
body: JSON.stringify({ authToken: plexToken }),
|
||||||
@@ -101,20 +110,20 @@ module.exports = function(ctx) {
|
|||||||
});
|
});
|
||||||
|
|
||||||
if (!authRes.ok) {
|
if (!authRes.ok) {
|
||||||
ctx.log.error('arr', 'Seerr Plex auth failed', { status: authRes.status });
|
log.error('arr', 'Seerr Plex auth failed', { status: authRes.status });
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
const setCookie = authRes.headers.get('set-cookie');
|
const setCookie = authRes.headers.get('set-cookie');
|
||||||
if (!setCookie) {
|
if (!setCookie) {
|
||||||
ctx.log.error('arr', 'No session cookie returned from Seerr');
|
log.error('arr', 'No session cookie returned from Seerr');
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
const sessionCookie = setCookie.split(';')[0];
|
const sessionCookie = setCookie.split(';')[0];
|
||||||
return { cookie: sessionCookie, plexToken };
|
return { cookie: sessionCookie, plexToken };
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.error('arr', 'Could not get Seerr session', { error: e.message });
|
log.error('arr', 'Could not get Seerr session', { error: e.message });
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -123,7 +132,7 @@ module.exports = function(ctx) {
|
|||||||
// Uses session cookie auth (Overseerr requires Plex-based admin session for settings)
|
// Uses session cookie auth (Overseerr requires Plex-based admin session for settings)
|
||||||
async function connectPlexToOverseerr(plexUrl, plexToken, overseerrUrl, sessionCookie) {
|
async function connectPlexToOverseerr(plexUrl, plexToken, overseerrUrl, sessionCookie) {
|
||||||
// 1. Get Plex server identity (for return info)
|
// 1. Get Plex server identity (for return info)
|
||||||
const identityRes = await ctx.fetchT(`${plexUrl}/identity`, {
|
const identityRes = await fetchT(`${plexUrl}/identity`, {
|
||||||
headers: { 'X-Plex-Token': plexToken, 'Accept': 'application/json' },
|
headers: { 'X-Plex-Token': plexToken, 'Accept': 'application/json' },
|
||||||
signal: AbortSignal.timeout(10000)
|
signal: AbortSignal.timeout(10000)
|
||||||
});
|
});
|
||||||
@@ -139,7 +148,7 @@ module.exports = function(ctx) {
|
|||||||
useSsl: false
|
useSsl: false
|
||||||
};
|
};
|
||||||
|
|
||||||
const configRes = await ctx.fetchT(`${overseerrUrl}/api/v1/settings/plex`, {
|
const configRes = await fetchT(`${overseerrUrl}/api/v1/settings/plex`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
'Content-Type': 'application/json',
|
'Content-Type': 'application/json',
|
||||||
@@ -154,19 +163,19 @@ module.exports = function(ctx) {
|
|||||||
|
|
||||||
// 3. Trigger library sync — Overseerr will use the admin's Plex token to discover libraries
|
// 3. Trigger library sync — Overseerr will use the admin's Plex token to discover libraries
|
||||||
try {
|
try {
|
||||||
await ctx.fetchT(`${overseerrUrl}/api/v1/settings/plex/sync`, {
|
await fetchT(`${overseerrUrl}/api/v1/settings/plex/sync`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'Cookie': sessionCookie },
|
headers: { 'Cookie': sessionCookie },
|
||||||
signal: AbortSignal.timeout(10000)
|
signal: AbortSignal.timeout(10000)
|
||||||
});
|
});
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('arr', 'Plex library sync trigger failed (non-fatal)', { error: e.message });
|
log.warn('arr', 'Plex library sync trigger failed (non-fatal)', { error: e.message });
|
||||||
}
|
}
|
||||||
|
|
||||||
// 4. Get discovered libraries
|
// 4. Get discovered libraries
|
||||||
let libraries = [];
|
let libraries = [];
|
||||||
try {
|
try {
|
||||||
const libRes = await ctx.fetchT(`${overseerrUrl}/api/v1/settings/plex`, {
|
const libRes = await fetchT(`${overseerrUrl}/api/v1/settings/plex`, {
|
||||||
headers: { 'Cookie': sessionCookie },
|
headers: { 'Cookie': sessionCookie },
|
||||||
signal: AbortSignal.timeout(5000)
|
signal: AbortSignal.timeout(5000)
|
||||||
});
|
});
|
||||||
@@ -186,13 +195,13 @@ module.exports = function(ctx) {
|
|||||||
// Check existing apps to avoid duplicates
|
// Check existing apps to avoid duplicates
|
||||||
let existingApps = [];
|
let existingApps = [];
|
||||||
try {
|
try {
|
||||||
const existingRes = await ctx.fetchT(`${prowlarrUrl}/api/v1/applications`, {
|
const existingRes = await fetchT(`${prowlarrUrl}/api/v1/applications`, {
|
||||||
headers: { 'X-Api-Key': prowlarrApiKey },
|
headers: { 'X-Api-Key': prowlarrApiKey },
|
||||||
signal: AbortSignal.timeout(10000)
|
signal: AbortSignal.timeout(10000)
|
||||||
});
|
});
|
||||||
existingApps = existingRes.ok ? await existingRes.json() : [];
|
existingApps = existingRes.ok ? await existingRes.json() : [];
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('arr', 'Could not fetch existing Prowlarr apps', { error: e.message });
|
log.warn('arr', 'Could not fetch existing Prowlarr apps', { error: e.message });
|
||||||
}
|
}
|
||||||
|
|
||||||
for (const [appName, config] of Object.entries(apps)) {
|
for (const [appName, config] of Object.entries(apps)) {
|
||||||
@@ -222,7 +231,7 @@ module.exports = function(ctx) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const res = await ctx.fetchT(`${prowlarrUrl}/api/v1/applications`, {
|
const res = await fetchT(`${prowlarrUrl}/api/v1/applications`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
'Content-Type': 'application/json',
|
'Content-Type': 'application/json',
|
||||||
@@ -259,7 +268,7 @@ module.exports = function(ctx) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const response = await ctx.fetchT(apiEndpoint, {
|
const response = await fetchT(apiEndpoint, {
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
headers,
|
headers,
|
||||||
signal: AbortSignal.timeout(15000)
|
signal: AbortSignal.timeout(15000)
|
||||||
|
|||||||
@@ -1,14 +1,37 @@
|
|||||||
const express = require('express');
|
const express = require('express');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Arr routes aggregator
|
||||||
|
* Assembles all arr sub-routes with their dependencies
|
||||||
|
* @param {Object} ctx - Application context (for backward compatibility)
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
module.exports = function(ctx) {
|
module.exports = function(ctx) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
const helpers = require('./helpers')(ctx);
|
|
||||||
|
|
||||||
router.use(require('./detect')(ctx, helpers));
|
// Extract dependencies from context
|
||||||
router.use(require('./credentials')(ctx, helpers));
|
const deps = {
|
||||||
router.use(require('./config')(ctx, helpers));
|
docker: ctx.docker,
|
||||||
router.use(require('./smart-connect')(ctx, helpers));
|
credentialManager: ctx.credentialManager,
|
||||||
router.use(require('./plex')(ctx, helpers));
|
servicesStateManager: ctx.servicesStateManager,
|
||||||
|
fetchT: ctx.fetchT,
|
||||||
|
asyncHandler: ctx.asyncHandler,
|
||||||
|
errorResponse: ctx.errorResponse,
|
||||||
|
log: ctx.log,
|
||||||
|
// Additional context properties needed by arr routes
|
||||||
|
notification: ctx.notification,
|
||||||
|
safeErrorMessage: ctx.safeErrorMessage
|
||||||
|
};
|
||||||
|
|
||||||
|
// Initialize helpers with dependencies
|
||||||
|
const helpers = require('./helpers')(deps);
|
||||||
|
|
||||||
|
// Mount sub-routes with explicit dependencies
|
||||||
|
router.use(require('./detect')({ ...deps, helpers }));
|
||||||
|
router.use(require('./credentials')({ ...deps, helpers }));
|
||||||
|
router.use(require('./config')({ ...deps, helpers }));
|
||||||
|
router.use(require('./smart-connect')({ ...deps, helpers }));
|
||||||
|
router.use(require('./plex')({ ...deps, helpers }));
|
||||||
|
|
||||||
return router;
|
return router;
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,11 +1,21 @@
|
|||||||
const express = require('express');
|
const express = require('express');
|
||||||
const { APP_PORTS } = require('../../constants');
|
const { APP_PORTS } = require('../../constants');
|
||||||
|
|
||||||
module.exports = function(ctx, helpers) {
|
/**
|
||||||
|
* Plex routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Function} deps.fetchT - Timeout-wrapped fetch
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Function} deps.errorResponse - Error response helper
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @param {Object} deps.helpers - Arr helpers module
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
module.exports = function({ fetchT, asyncHandler, errorResponse, log, helpers }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
// Plex Libraries endpoint
|
// Plex Libraries endpoint
|
||||||
router.get('/plex/libraries', ctx.asyncHandler(async (req, res) => {
|
router.get('/plex/libraries', asyncHandler(async (req, res) => {
|
||||||
// Get Plex token
|
// Get Plex token
|
||||||
let plexToken = await helpers.getPlexToken('plex');
|
let plexToken = await helpers.getPlexToken('plex');
|
||||||
if (!plexToken) {
|
if (!plexToken) {
|
||||||
@@ -13,7 +23,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (!plexToken) {
|
if (!plexToken) {
|
||||||
return ctx.errorResponse(res, 400, 'No Plex token available. Claim your Plex server first.', {
|
return errorResponse(res, 400, 'No Plex token available. Claim your Plex server first.', {
|
||||||
hint: 'Deploy Plex with a claim token or manually configure it.'
|
hint: 'Deploy Plex with a claim token or manually configure it.'
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -30,13 +40,13 @@ module.exports = function(ctx, helpers) {
|
|||||||
} catch (e) { /* use default */ }
|
} catch (e) { /* use default */ }
|
||||||
|
|
||||||
// Fetch libraries
|
// Fetch libraries
|
||||||
const libRes = await ctx.fetchT(`${plexUrl}/library/sections`, {
|
const libRes = await fetchT(`${plexUrl}/library/sections`, {
|
||||||
headers: { 'X-Plex-Token': plexToken, 'Accept': 'application/json' },
|
headers: { 'X-Plex-Token': plexToken, 'Accept': 'application/json' },
|
||||||
signal: AbortSignal.timeout(10000)
|
signal: AbortSignal.timeout(10000)
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!libRes.ok) {
|
if (!libRes.ok) {
|
||||||
return ctx.errorResponse(res, 502, `Plex returned ${libRes.status}`);
|
return errorResponse(res, 502, `Plex returned ${libRes.status}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
const data = await libRes.json();
|
const data = await libRes.json();
|
||||||
@@ -52,7 +62,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
let serverName = 'Plex';
|
let serverName = 'Plex';
|
||||||
let version = null;
|
let version = null;
|
||||||
try {
|
try {
|
||||||
const identityRes = await ctx.fetchT(`${plexUrl}/identity`, {
|
const identityRes = await fetchT(`${plexUrl}/identity`, {
|
||||||
headers: { 'X-Plex-Token': plexToken, 'Accept': 'application/json' },
|
headers: { 'X-Plex-Token': plexToken, 'Accept': 'application/json' },
|
||||||
signal: AbortSignal.timeout(5000)
|
signal: AbortSignal.timeout(5000)
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,11 +1,22 @@
|
|||||||
const express = require('express');
|
const express = require('express');
|
||||||
const { APP_PORTS } = require('../../constants');
|
const { APP_PORTS } = require('../../constants');
|
||||||
|
|
||||||
module.exports = function(ctx, helpers) {
|
/**
|
||||||
|
* Arr smart-connect routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.credentialManager - Credential manager
|
||||||
|
* @param {Function} deps.fetchT - Timeout-wrapped fetch
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Function} deps.errorResponse - Error response helper
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @param {Object} deps.helpers - Arr helpers module
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
module.exports = function({ credentialManager, fetchT, asyncHandler, errorResponse, log, helpers }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
// Smart Connect: Unified orchestration endpoint
|
// Smart Connect: Unified orchestration endpoint
|
||||||
router.post('/arr/smart-connect', ctx.asyncHandler(async (req, res) => {
|
router.post('/arr/smart-connect', asyncHandler(async (req, res) => {
|
||||||
const { services: inputServices, configurePlex, configureProwlarr, configureSeerr, saveCredentials } = req.body;
|
const { services: inputServices, configurePlex, configureProwlarr, configureSeerr, saveCredentials } = req.body;
|
||||||
const steps = [];
|
const steps = [];
|
||||||
const connectedServices = {}; // { radarr: { url, apiKey }, sonarr: { url, apiKey }, ... }
|
const connectedServices = {}; // { radarr: { url, apiKey }, sonarr: { url, apiKey }, ... }
|
||||||
@@ -20,9 +31,9 @@ module.exports = function(ctx, helpers) {
|
|||||||
// Fallback to stored credentials
|
// Fallback to stored credentials
|
||||||
if (!apiKey) {
|
if (!apiKey) {
|
||||||
const credKey = `arr.${svc}.apikey`;
|
const credKey = `arr.${svc}.apikey`;
|
||||||
apiKey = await ctx.credentialManager.retrieve(credKey);
|
apiKey = await credentialManager.retrieve(credKey);
|
||||||
if (!url) {
|
if (!url) {
|
||||||
const metadata = await ctx.credentialManager.getMetadata(credKey);
|
const metadata = await credentialManager.getMetadata(credKey);
|
||||||
url = metadata?.url;
|
url = metadata?.url;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -52,7 +63,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
|
|
||||||
// Save credentials
|
// Save credentials
|
||||||
if (saveCredentials) {
|
if (saveCredentials) {
|
||||||
const stored = await ctx.credentialManager.store(`arr.${svc}.apikey`, apiKey, {
|
const stored = await credentialManager.store(`arr.${svc}.apikey`, apiKey, {
|
||||||
service: svc, source: 'external', url,
|
service: svc, source: 'external', url,
|
||||||
lastVerified: new Date().toISOString(),
|
lastVerified: new Date().toISOString(),
|
||||||
version: test.version
|
version: test.version
|
||||||
@@ -71,7 +82,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
let plexUrl = null;
|
let plexUrl = null;
|
||||||
if (configurePlex) {
|
if (configurePlex) {
|
||||||
plexToken = await helpers.getPlexToken('plex');
|
plexToken = await helpers.getPlexToken('plex');
|
||||||
if (!plexToken) plexToken = await ctx.credentialManager.retrieve('arr.plex.token');
|
if (!plexToken) plexToken = await credentialManager.retrieve('arr.plex.token');
|
||||||
|
|
||||||
if (plexToken) {
|
if (plexToken) {
|
||||||
// Get Plex URL
|
// Get Plex URL
|
||||||
@@ -108,14 +119,14 @@ module.exports = function(ctx, helpers) {
|
|||||||
const radarrBasePath = radarrUrlObj.pathname.replace(/\/+$/, '');
|
const radarrBasePath = radarrUrlObj.pathname.replace(/\/+$/, '');
|
||||||
|
|
||||||
// Fetch quality profiles
|
// Fetch quality profiles
|
||||||
const profilesRes = await ctx.fetchT(`${radarrUrl}/api/v3/qualityprofile`, {
|
const profilesRes = await fetchT(`${radarrUrl}/api/v3/qualityprofile`, {
|
||||||
headers: { 'X-Api-Key': connectedServices.radarr.apiKey },
|
headers: { 'X-Api-Key': connectedServices.radarr.apiKey },
|
||||||
signal: AbortSignal.timeout(10000)
|
signal: AbortSignal.timeout(10000)
|
||||||
});
|
});
|
||||||
const profiles = profilesRes.ok ? await profilesRes.json() : [];
|
const profiles = profilesRes.ok ? await profilesRes.json() : [];
|
||||||
|
|
||||||
// Use stored quality profile preference, fallback to first profile
|
// Use stored quality profile preference, fallback to first profile
|
||||||
const radarrMeta = await ctx.credentialManager.getMetadata('arr.radarr.apikey');
|
const radarrMeta = await credentialManager.getMetadata('arr.radarr.apikey');
|
||||||
let defaultProfile = profiles[0] || { id: 1, name: 'Any' };
|
let defaultProfile = profiles[0] || { id: 1, name: 'Any' };
|
||||||
if (radarrMeta?.qualityProfileId) {
|
if (radarrMeta?.qualityProfileId) {
|
||||||
const stored = profiles.find(p => p.id === radarrMeta.qualityProfileId);
|
const stored = profiles.find(p => p.id === radarrMeta.qualityProfileId);
|
||||||
@@ -123,7 +134,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Fetch root folders
|
// Fetch root folders
|
||||||
const rootFoldersRes = await ctx.fetchT(`${radarrUrl}/api/v3/rootfolder`, {
|
const rootFoldersRes = await fetchT(`${radarrUrl}/api/v3/rootfolder`, {
|
||||||
headers: { 'X-Api-Key': connectedServices.radarr.apiKey },
|
headers: { 'X-Api-Key': connectedServices.radarr.apiKey },
|
||||||
signal: AbortSignal.timeout(10000)
|
signal: AbortSignal.timeout(10000)
|
||||||
});
|
});
|
||||||
@@ -151,7 +162,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
tags: []
|
tags: []
|
||||||
};
|
};
|
||||||
|
|
||||||
const radarrRes = await ctx.fetchT(`${overseerrUrl}/api/v1/settings/radarr`, {
|
const radarrRes = await fetchT(`${overseerrUrl}/api/v1/settings/radarr`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'Content-Type': 'application/json', 'Cookie': overseerrCookie },
|
headers: { 'Content-Type': 'application/json', 'Cookie': overseerrCookie },
|
||||||
body: JSON.stringify(radarrConfig),
|
body: JSON.stringify(radarrConfig),
|
||||||
@@ -175,21 +186,21 @@ module.exports = function(ctx, helpers) {
|
|||||||
const sonarrUrlObj = new URL(sonarrUrl);
|
const sonarrUrlObj = new URL(sonarrUrl);
|
||||||
const sonarrBasePath = sonarrUrlObj.pathname.replace(/\/+$/, '');
|
const sonarrBasePath = sonarrUrlObj.pathname.replace(/\/+$/, '');
|
||||||
|
|
||||||
const profilesRes = await ctx.fetchT(`${sonarrUrl}/api/v3/qualityprofile`, {
|
const profilesRes = await fetchT(`${sonarrUrl}/api/v3/qualityprofile`, {
|
||||||
headers: { 'X-Api-Key': connectedServices.sonarr.apiKey },
|
headers: { 'X-Api-Key': connectedServices.sonarr.apiKey },
|
||||||
signal: AbortSignal.timeout(10000)
|
signal: AbortSignal.timeout(10000)
|
||||||
});
|
});
|
||||||
const profiles = profilesRes.ok ? await profilesRes.json() : [];
|
const profiles = profilesRes.ok ? await profilesRes.json() : [];
|
||||||
|
|
||||||
// Use stored quality profile preference, fallback to first profile
|
// Use stored quality profile preference, fallback to first profile
|
||||||
const sonarrMeta = await ctx.credentialManager.getMetadata('arr.sonarr.apikey');
|
const sonarrMeta = await credentialManager.getMetadata('arr.sonarr.apikey');
|
||||||
let defaultProfile = profiles[0] || { id: 1, name: 'Any' };
|
let defaultProfile = profiles[0] || { id: 1, name: 'Any' };
|
||||||
if (sonarrMeta?.qualityProfileId) {
|
if (sonarrMeta?.qualityProfileId) {
|
||||||
const stored = profiles.find(p => p.id === sonarrMeta.qualityProfileId);
|
const stored = profiles.find(p => p.id === sonarrMeta.qualityProfileId);
|
||||||
if (stored) defaultProfile = stored;
|
if (stored) defaultProfile = stored;
|
||||||
}
|
}
|
||||||
|
|
||||||
const rootFoldersRes = await ctx.fetchT(`${sonarrUrl}/api/v3/rootfolder`, {
|
const rootFoldersRes = await fetchT(`${sonarrUrl}/api/v3/rootfolder`, {
|
||||||
headers: { 'X-Api-Key': connectedServices.sonarr.apiKey },
|
headers: { 'X-Api-Key': connectedServices.sonarr.apiKey },
|
||||||
signal: AbortSignal.timeout(10000)
|
signal: AbortSignal.timeout(10000)
|
||||||
});
|
});
|
||||||
@@ -198,7 +209,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
|
|
||||||
let languageProfileId = 1;
|
let languageProfileId = 1;
|
||||||
try {
|
try {
|
||||||
const langRes = await ctx.fetchT(`${sonarrUrl}/api/v3/languageprofile`, {
|
const langRes = await fetchT(`${sonarrUrl}/api/v3/languageprofile`, {
|
||||||
headers: { 'X-Api-Key': connectedServices.sonarr.apiKey },
|
headers: { 'X-Api-Key': connectedServices.sonarr.apiKey },
|
||||||
signal: AbortSignal.timeout(5000)
|
signal: AbortSignal.timeout(5000)
|
||||||
});
|
});
|
||||||
@@ -229,7 +240,7 @@ module.exports = function(ctx, helpers) {
|
|||||||
tags: []
|
tags: []
|
||||||
};
|
};
|
||||||
|
|
||||||
const sonarrRes = await ctx.fetchT(`${overseerrUrl}/api/v1/settings/sonarr`, {
|
const sonarrRes = await fetchT(`${overseerrUrl}/api/v1/settings/sonarr`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'Content-Type': 'application/json', 'Cookie': overseerrCookie },
|
headers: { 'Content-Type': 'application/json', 'Cookie': overseerrCookie },
|
||||||
body: JSON.stringify(sonarrConfig),
|
body: JSON.stringify(sonarrConfig),
|
||||||
|
|||||||
@@ -4,14 +4,37 @@ const initKeys = require('./keys');
|
|||||||
const initSessionHandlers = require('./session-handlers');
|
const initSessionHandlers = require('./session-handlers');
|
||||||
const initSsoGate = require('./sso-gate');
|
const initSsoGate = require('./sso-gate');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Auth routes aggregator
|
||||||
|
* Assembles all auth sub-routes with their dependencies
|
||||||
|
* @param {Object} ctx - Application context (for backward compatibility)
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
module.exports = function(ctx) {
|
module.exports = function(ctx) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
const { getAppSession, appSessionCache } = initSessionHandlers(ctx);
|
// Extract dependencies from context
|
||||||
|
const deps = {
|
||||||
|
authManager: ctx.authManager,
|
||||||
|
credentialManager: ctx.credentialManager,
|
||||||
|
totpConfig: ctx.totpConfig,
|
||||||
|
saveTotpConfig: ctx.saveTotpConfig,
|
||||||
|
session: ctx.session,
|
||||||
|
asyncHandler: ctx.asyncHandler,
|
||||||
|
errorResponse: ctx.errorResponse,
|
||||||
|
log: ctx.log,
|
||||||
|
// Additional deps for sso-gate
|
||||||
|
fetchT: ctx.fetchT,
|
||||||
|
getServiceById: ctx.getServiceById,
|
||||||
|
licenseManager: ctx.licenseManager,
|
||||||
|
servicesStateManager: ctx.servicesStateManager
|
||||||
|
};
|
||||||
|
|
||||||
router.use(initTotp(ctx));
|
const { getAppSession, appSessionCache } = initSessionHandlers(deps);
|
||||||
router.use(initKeys(ctx));
|
|
||||||
router.use(initSsoGate(ctx, getAppSession, appSessionCache));
|
router.use(initTotp(deps));
|
||||||
|
router.use(initKeys(deps));
|
||||||
|
router.use(initSsoGate({ ...deps, getAppSession, appSessionCache }));
|
||||||
|
|
||||||
return router;
|
return router;
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,7 +1,15 @@
|
|||||||
const express = require('express');
|
const express = require('express');
|
||||||
const { ValidationError, ForbiddenError, NotFoundError } = require('../../errors');
|
const { ValidationError, ForbiddenError, NotFoundError } = require('../../errors');
|
||||||
|
/**
|
||||||
|
* Auth API keys routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.authManager - Auth manager
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
|
||||||
module.exports = function(ctx) {
|
module.exports = function({ authManager, asyncHandler, log }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
// Helper function to parse expiration strings to milliseconds
|
// Helper function to parse expiration strings to milliseconds
|
||||||
@@ -24,18 +32,18 @@ module.exports = function(ctx) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// List all API keys
|
// List all API keys
|
||||||
router.get('/auth/keys', ctx.asyncHandler(async (req, res) => {
|
router.get('/auth/keys', asyncHandler(async (req, res) => {
|
||||||
// Require session authentication (not API key - can't manage keys with key itself)
|
// Require session authentication (not API key - can't manage keys with key itself)
|
||||||
if (!req.auth || req.auth.type !== 'session') {
|
if (!req.auth || req.auth.type !== 'session') {
|
||||||
throw new ForbiddenError('API key management requires TOTP session authentication');
|
throw new ForbiddenError('API key management requires TOTP session authentication');
|
||||||
}
|
}
|
||||||
|
|
||||||
const keys = await ctx.authManager.listAPIKeys();
|
const keys = await authManager.listAPIKeys();
|
||||||
res.json({ success: true, keys });
|
res.json({ success: true, keys });
|
||||||
}, 'auth-keys-list'));
|
}, 'auth-keys-list'));
|
||||||
|
|
||||||
// Generate new API key
|
// Generate new API key
|
||||||
router.post('/auth/keys', ctx.asyncHandler(async (req, res) => {
|
router.post('/auth/keys', asyncHandler(async (req, res) => {
|
||||||
// Require session authentication
|
// Require session authentication
|
||||||
if (!req.auth || req.auth.type !== 'session') {
|
if (!req.auth || req.auth.type !== 'session') {
|
||||||
throw new ForbiddenError('API key generation requires TOTP session authentication');
|
throw new ForbiddenError('API key generation requires TOTP session authentication');
|
||||||
@@ -53,7 +61,7 @@ module.exports = function(ctx) {
|
|||||||
throw new ValidationError(`Invalid scopes. Valid options: ${validScopes.join(', ')}`, 'scopes');
|
throw new ValidationError(`Invalid scopes. Valid options: ${validScopes.join(', ')}`, 'scopes');
|
||||||
}
|
}
|
||||||
|
|
||||||
const keyData = await ctx.authManager.generateAPIKey(
|
const keyData = await authManager.generateAPIKey(
|
||||||
name.trim(),
|
name.trim(),
|
||||||
scopes || ['read', 'write']
|
scopes || ['read', 'write']
|
||||||
);
|
);
|
||||||
@@ -70,7 +78,7 @@ module.exports = function(ctx) {
|
|||||||
}, 'auth-keys-generate'));
|
}, 'auth-keys-generate'));
|
||||||
|
|
||||||
// Revoke API key
|
// Revoke API key
|
||||||
router.delete('/auth/keys/:keyId', ctx.asyncHandler(async (req, res) => {
|
router.delete('/auth/keys/:keyId', asyncHandler(async (req, res) => {
|
||||||
// Require session authentication
|
// Require session authentication
|
||||||
if (!req.auth || req.auth.type !== 'session') {
|
if (!req.auth || req.auth.type !== 'session') {
|
||||||
throw new ForbiddenError('API key revocation requires TOTP session authentication');
|
throw new ForbiddenError('API key revocation requires TOTP session authentication');
|
||||||
@@ -82,7 +90,7 @@ module.exports = function(ctx) {
|
|||||||
throw new ValidationError('Key ID is required', 'keyId');
|
throw new ValidationError('Key ID is required', 'keyId');
|
||||||
}
|
}
|
||||||
|
|
||||||
const success = await ctx.authManager.revokeAPIKey(keyId);
|
const success = await authManager.revokeAPIKey(keyId);
|
||||||
|
|
||||||
if (success) {
|
if (success) {
|
||||||
res.json({ success: true, message: 'API key revoked successfully' });
|
res.json({ success: true, message: 'API key revoked successfully' });
|
||||||
@@ -92,7 +100,7 @@ module.exports = function(ctx) {
|
|||||||
}, 'auth-keys-revoke'));
|
}, 'auth-keys-revoke'));
|
||||||
|
|
||||||
// Generate JWT from TOTP session
|
// Generate JWT from TOTP session
|
||||||
router.post('/auth/jwt', ctx.asyncHandler(async (req, res) => {
|
router.post('/auth/jwt', asyncHandler(async (req, res) => {
|
||||||
// Require session authentication
|
// Require session authentication
|
||||||
if (!req.auth || req.auth.type !== 'session') {
|
if (!req.auth || req.auth.type !== 'session') {
|
||||||
throw new ForbiddenError('JWT generation requires TOTP session authentication');
|
throw new ForbiddenError('JWT generation requires TOTP session authentication');
|
||||||
@@ -106,7 +114,7 @@ module.exports = function(ctx) {
|
|||||||
throw new ValidationError('Invalid expiresIn format. Use: 60s, 15m, 24h, 7d, 1y', 'expiresIn');
|
throw new ValidationError('Invalid expiresIn format. Use: 60s, 15m, 24h, 7d, 1y', 'expiresIn');
|
||||||
}
|
}
|
||||||
|
|
||||||
const token = await ctx.authManager.generateJWT(
|
const token = await authManager.generateJWT(
|
||||||
{
|
{
|
||||||
sub: userId || 'dashcaddy-admin',
|
sub: userId || 'dashcaddy-admin',
|
||||||
scope: ['admin'] // Session-generated JWTs have admin scope
|
scope: ['admin'] // Session-generated JWTs have admin scope
|
||||||
|
|||||||
@@ -1,8 +1,18 @@
|
|||||||
const { SESSION_TTL, APP, PLEX, TIMEOUTS, buildMediaAuth } = require('../../constants');
|
const { SESSION_TTL, APP, PLEX, TIMEOUTS, buildMediaAuth } = require('../../constants');
|
||||||
const { createCache, CACHE_CONFIGS } = require('../../cache-config');
|
const { createCache, CACHE_CONFIGS } = require('../../cache-config');
|
||||||
|
|
||||||
module.exports = function(ctx) {
|
module.exports = function({ authManager, credentialManager, asyncHandler, errorResponse, log }) {
|
||||||
// App session cache for auto-login
|
// App session cache for auto-login
|
||||||
|
/**
|
||||||
|
* Auth session handlers routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.authManager - Auth manager
|
||||||
|
* @param {Object} deps.credentialManager - Credential manager
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Function} deps.errorResponse - Error response helper
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
const appSessionCache = createCache(CACHE_CONFIGS.appSessions);
|
const appSessionCache = createCache(CACHE_CONFIGS.appSessions);
|
||||||
|
|
||||||
async function getAppSession(serviceId, baseUrl, username, password) {
|
async function getAppSession(serviceId, baseUrl, username, password) {
|
||||||
@@ -36,12 +46,12 @@ module.exports = function(ctx) {
|
|||||||
const location = locationMatch ? locationMatch[1].trim() : '';
|
const location = locationMatch ? locationMatch[1].trim() : '';
|
||||||
if (location && !location.includes('login')) {
|
if (location && !location.includes('login')) {
|
||||||
appSessionCache.set(serviceId, { cookies: '__ip_session=1', exp: Date.now() + SESSION_TTL.IP_SESSION });
|
appSessionCache.set(serviceId, { cookies: '__ip_session=1', exp: Date.now() + SESSION_TTL.IP_SESSION });
|
||||||
ctx.log.info('auth', 'Router auto-login successful (IP-based session)', { serviceId });
|
log.info('auth', 'Router auto-login successful (IP-based session)', { serviceId });
|
||||||
return '__ip_session=1';
|
return '__ip_session=1';
|
||||||
}
|
}
|
||||||
ctx.log.warn('auth', 'Router auto-login failed', { serviceId });
|
log.warn('auth', 'Router auto-login failed', { serviceId });
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('auth', 'Router auto-login error', { serviceId, error: e.message?.substring(0, 100) });
|
log.warn('auth', 'Router auto-login error', { serviceId, error: e.message?.substring(0, 100) });
|
||||||
}
|
}
|
||||||
appSessionCache.set(serviceId, { failed: true, exp: Date.now() + SESSION_TTL.FAILED_LOGIN });
|
appSessionCache.set(serviceId, { failed: true, exp: Date.now() + SESSION_TTL.FAILED_LOGIN });
|
||||||
return null;
|
return null;
|
||||||
@@ -73,12 +83,12 @@ module.exports = function(ctx) {
|
|||||||
serverId: authData.ServerId, serverName: authData.User?.ServerName || serviceId,
|
serverId: authData.ServerId, serverName: authData.User?.ServerName || serviceId,
|
||||||
};
|
};
|
||||||
appSessionCache.set(serviceId, { cookies: `token=${authData.AccessToken}`, token: authData.AccessToken, tokenData, exp: Date.now() + SESSION_TTL.TOKEN_SESSION });
|
appSessionCache.set(serviceId, { cookies: `token=${authData.AccessToken}`, token: authData.AccessToken, tokenData, exp: Date.now() + SESSION_TTL.TOKEN_SESSION });
|
||||||
ctx.log.info('auth', 'Auto-login successful (token + userId obtained)', { serviceId });
|
log.info('auth', 'Auto-login successful (token + userId obtained)', { serviceId });
|
||||||
return `token=${authData.AccessToken}`;
|
return `token=${authData.AccessToken}`;
|
||||||
}
|
}
|
||||||
ctx.log.warn('auth', 'Auto-login failed', { serviceId, status: authResp.status });
|
log.warn('auth', 'Auto-login failed', { serviceId, status: authResp.status });
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('auth', 'Auto-login error', { serviceId, error: e.message });
|
log.warn('auth', 'Auto-login error', { serviceId, error: e.message });
|
||||||
}
|
}
|
||||||
appSessionCache.set(serviceId, { failed: true, exp: Date.now() + SESSION_TTL.FAILED_LOGIN });
|
appSessionCache.set(serviceId, { failed: true, exp: Date.now() + SESSION_TTL.FAILED_LOGIN });
|
||||||
return null;
|
return null;
|
||||||
@@ -99,12 +109,12 @@ module.exports = function(ctx) {
|
|||||||
const token = plexData?.user?.authToken;
|
const token = plexData?.user?.authToken;
|
||||||
if (token) {
|
if (token) {
|
||||||
appSessionCache.set(serviceId, { cookies: `plexToken=${token}`, token, exp: Date.now() + SESSION_TTL.TOKEN_SESSION });
|
appSessionCache.set(serviceId, { cookies: `plexToken=${token}`, token, exp: Date.now() + SESSION_TTL.TOKEN_SESSION });
|
||||||
ctx.log.info('auth', 'Plex auto-login successful via plex.tv', { serviceId });
|
log.info('auth', 'Plex auto-login successful via plex.tv', { serviceId });
|
||||||
return `plexToken=${token}`;
|
return `plexToken=${token}`;
|
||||||
}
|
}
|
||||||
ctx.log.warn('auth', 'Plex auto-login failed: no token in response', { serviceId, status: plexResp.status });
|
log.warn('auth', 'Plex auto-login failed: no token in response', { serviceId, status: plexResp.status });
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('auth', 'Plex auto-login error', { serviceId, error: e.message });
|
log.warn('auth', 'Plex auto-login error', { serviceId, error: e.message });
|
||||||
}
|
}
|
||||||
appSessionCache.set(serviceId, { failed: true, exp: Date.now() + SESSION_TTL.FAILED_LOGIN });
|
appSessionCache.set(serviceId, { failed: true, exp: Date.now() + SESSION_TTL.FAILED_LOGIN });
|
||||||
return null;
|
return null;
|
||||||
@@ -129,11 +139,11 @@ module.exports = function(ctx) {
|
|||||||
if (data.token) {
|
if (data.token) {
|
||||||
const cookies = `token=${data.token}`;
|
const cookies = `token=${data.token}`;
|
||||||
appSessionCache.set(serviceId, { cookies, exp: Date.now() + SESSION_TTL.COOKIE_SESSION });
|
appSessionCache.set(serviceId, { cookies, exp: Date.now() + SESSION_TTL.COOKIE_SESSION });
|
||||||
ctx.log.info('auth', 'Auto-login successful (JWT token cached)', { serviceId });
|
log.info('auth', 'Auto-login successful (JWT token cached)', { serviceId });
|
||||||
return cookies;
|
return cookies;
|
||||||
}
|
}
|
||||||
} catch (e) { /* JSON parse failed */ }
|
} catch (e) { /* JSON parse failed */ }
|
||||||
ctx.log.warn('auth', 'Auto-login: no token in response', { serviceId, status: resp.status });
|
log.warn('auth', 'Auto-login: no token in response', { serviceId, status: resp.status });
|
||||||
appSessionCache.set(serviceId, { failed: true, exp: Date.now() + SESSION_TTL.FAILED_LOGIN });
|
appSessionCache.set(serviceId, { failed: true, exp: Date.now() + SESSION_TTL.FAILED_LOGIN });
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
@@ -141,7 +151,7 @@ module.exports = function(ctx) {
|
|||||||
if (serviceId === 'torrent') {
|
if (serviceId === 'torrent') {
|
||||||
const text = await resp.text();
|
const text = await resp.text();
|
||||||
if (text.trim() !== 'Ok.') {
|
if (text.trim() !== 'Ok.') {
|
||||||
ctx.log.warn('auth', 'Auto-login failed', { serviceId, response: text.trim() });
|
log.warn('auth', 'Auto-login failed', { serviceId, response: text.trim() });
|
||||||
appSessionCache.set(serviceId, { failed: true, exp: Date.now() + SESSION_TTL.FAILED_LOGIN });
|
appSessionCache.set(serviceId, { failed: true, exp: Date.now() + SESSION_TTL.FAILED_LOGIN });
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
@@ -151,7 +161,7 @@ module.exports = function(ctx) {
|
|||||||
if (setCookies.length > 0) {
|
if (setCookies.length > 0) {
|
||||||
const cookies = setCookies.map(c => c.split(';')[0]).join('; ');
|
const cookies = setCookies.map(c => c.split(';')[0]).join('; ');
|
||||||
appSessionCache.set(serviceId, { cookies, exp: Date.now() + SESSION_TTL.COOKIE_SESSION });
|
appSessionCache.set(serviceId, { cookies, exp: Date.now() + SESSION_TTL.COOKIE_SESSION });
|
||||||
ctx.log.info('auth', 'Auto-login successful, session cached', { serviceId, cookieCount: setCookies.length });
|
log.info('auth', 'Auto-login successful, session cached', { serviceId, cookieCount: setCookies.length });
|
||||||
return cookies;
|
return cookies;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -159,14 +169,14 @@ module.exports = function(ctx) {
|
|||||||
if (rawCookie) {
|
if (rawCookie) {
|
||||||
const cookies = rawCookie.split(/,(?=[^ ])/).map(c => c.split(';')[0].trim()).join('; ');
|
const cookies = rawCookie.split(/,(?=[^ ])/).map(c => c.split(';')[0].trim()).join('; ');
|
||||||
appSessionCache.set(serviceId, { cookies, exp: Date.now() + SESSION_TTL.COOKIE_SESSION });
|
appSessionCache.set(serviceId, { cookies, exp: Date.now() + SESSION_TTL.COOKIE_SESSION });
|
||||||
ctx.log.info('auth', 'Auto-login successful (fallback), session cached', { serviceId });
|
log.info('auth', 'Auto-login successful (fallback), session cached', { serviceId });
|
||||||
return cookies;
|
return cookies;
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx.log.warn('auth', 'Auto-login: no cookies in response', { serviceId, status: resp.status });
|
log.warn('auth', 'Auto-login: no cookies in response', { serviceId, status: resp.status });
|
||||||
appSessionCache.set(serviceId, { failed: true, exp: Date.now() + SESSION_TTL.FAILED_LOGIN });
|
appSessionCache.set(serviceId, { failed: true, exp: Date.now() + SESSION_TTL.FAILED_LOGIN });
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('auth', 'Auto-login error', { serviceId, error: e.message });
|
log.warn('auth', 'Auto-login error', { serviceId, error: e.message });
|
||||||
appSessionCache.set(serviceId, { failed: true, exp: Date.now() + SESSION_TTL.FAILED_LOGIN });
|
appSessionCache.set(serviceId, { failed: true, exp: Date.now() + SESSION_TTL.FAILED_LOGIN });
|
||||||
}
|
}
|
||||||
return null;
|
return null;
|
||||||
|
|||||||
@@ -1,19 +1,36 @@
|
|||||||
const express = require('express');
|
const express = require('express');
|
||||||
const { SESSION_TTL, APP, PLEX, TIMEOUTS, buildMediaAuth } = require('../../constants');
|
const { SESSION_TTL, APP, PLEX, TIMEOUTS, buildMediaAuth } = require('../../constants');
|
||||||
const { AuthenticationError, NotFoundError } = require('../errors');
|
const { AuthenticationError, NotFoundError } = require('../../errors');
|
||||||
|
|
||||||
module.exports = function(ctx, getAppSession, appSessionCache) {
|
/**
|
||||||
|
* Auth SSO gate routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies (includes session helpers)
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
module.exports = function(deps) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
|
// Extract dependencies
|
||||||
|
const { authManager, totpConfig, session, asyncHandler, errorResponse, log, getAppSession, appSessionCache, credentialManager, fetchT, getServiceById, licenseManager, servicesStateManager } = deps;
|
||||||
|
|
||||||
|
// Create ctx-like object for compatibility
|
||||||
|
const ctx = {
|
||||||
|
credentialManager,
|
||||||
|
fetchT,
|
||||||
|
getServiceById,
|
||||||
|
licenseManager,
|
||||||
|
servicesStateManager
|
||||||
|
};
|
||||||
|
|
||||||
// Caddy forward_auth gate: checks TOTP session + injects service credentials
|
// Caddy forward_auth gate: checks TOTP session + injects service credentials
|
||||||
router.get('/auth/gate/:serviceId', ctx.asyncHandler(async (req, res) => {
|
router.get('/auth/gate/:serviceId', asyncHandler(async (req, res) => {
|
||||||
res.setHeader('Cache-Control', 'no-store, no-cache, must-revalidate');
|
res.setHeader('Cache-Control', 'no-store, no-cache, must-revalidate');
|
||||||
const serviceId = req.params.serviceId;
|
const serviceId = req.params.serviceId;
|
||||||
|
|
||||||
// Check TOTP session first
|
// Check TOTP session first
|
||||||
if (ctx.totpConfig.enabled && ctx.totpConfig.sessionDuration !== 'never') {
|
if (totpConfig.enabled && totpConfig.sessionDuration !== 'never') {
|
||||||
const valid = ctx.session.isValid(req);
|
const valid = session.isValid(req);
|
||||||
if (!valid) return ctx.errorResponse(res, 401, 'Session expired or invalid', { authenticated: false });
|
if (!valid) return errorResponse(res, 401, 'Session expired or invalid', { authenticated: false });
|
||||||
}
|
}
|
||||||
|
|
||||||
// Session valid (or TOTP disabled) - inject credentials if premium SSO is active
|
// Session valid (or TOTP disabled) - inject credentials if premium SSO is active
|
||||||
@@ -73,18 +90,18 @@ module.exports = function(ctx, getAppSession, appSessionCache) {
|
|||||||
const apiKey = arrKey || svcKey;
|
const apiKey = arrKey || svcKey;
|
||||||
if (apiKey) { res.setHeader('X-Api-Key', apiKey); injected = true; }
|
if (apiKey) { res.setHeader('X-Api-Key', apiKey); injected = true; }
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('auth', 'Credential error', { serviceId, error: e.message });
|
log.warn('auth', 'Credential error', { serviceId, error: e.message });
|
||||||
}
|
}
|
||||||
|
|
||||||
res.status(200).json({ authenticated: true, credentialsInjected: injected });
|
res.status(200).json({ authenticated: true, credentialsInjected: injected });
|
||||||
}, 'auth-gate'));
|
}, 'auth-gate'));
|
||||||
|
|
||||||
// Return cached app session token for client-side auth (Premium SSO feature)
|
// Return cached app session token for client-side auth (Premium SSO feature)
|
||||||
router.get('/auth/app-token/:serviceId', ctx.licenseManager.requirePremium('sso'), ctx.asyncHandler(async (req, res) => {
|
router.get('/auth/app-token/:serviceId', ctx.licenseManager.requirePremium('sso'), asyncHandler(async (req, res) => {
|
||||||
const { serviceId } = req.params;
|
const { serviceId } = req.params;
|
||||||
|
|
||||||
if (ctx.totpConfig.enabled && ctx.totpConfig.sessionDuration !== 'never') {
|
if (totpConfig.enabled && totpConfig.sessionDuration !== 'never') {
|
||||||
if (!ctx.session.isValid(req)) throw new AuthenticationError('Not authenticated');
|
if (!session.isValid(req)) throw new AuthenticationError('Not authenticated');
|
||||||
}
|
}
|
||||||
|
|
||||||
// Jellyfin/Emby: separate browser-specific token
|
// Jellyfin/Emby: separate browser-specific token
|
||||||
@@ -92,7 +109,7 @@ module.exports = function(ctx, getAppSession, appSessionCache) {
|
|||||||
const browserCacheKey = `${serviceId}_browser`;
|
const browserCacheKey = `${serviceId}_browser`;
|
||||||
const browserCached = appSessionCache.get(browserCacheKey);
|
const browserCached = appSessionCache.get(browserCacheKey);
|
||||||
if (browserCached && browserCached.exp > Date.now()) {
|
if (browserCached && browserCached.exp > Date.now()) {
|
||||||
if (browserCached.failed) return ctx.errorResponse(res, 500, 'Login recently failed');
|
if (browserCached.failed) return errorResponse(res, 500, 'Login recently failed');
|
||||||
if (browserCached.token) {
|
if (browserCached.token) {
|
||||||
const resp = { token: browserCached.token };
|
const resp = { token: browserCached.token };
|
||||||
if (browserCached.tokenData) Object.assign(resp, browserCached.tokenData);
|
if (browserCached.tokenData) Object.assign(resp, browserCached.tokenData);
|
||||||
@@ -118,17 +135,17 @@ module.exports = function(ctx, getAppSession, appSessionCache) {
|
|||||||
appSessionCache.set(browserCacheKey, { token: authData.AccessToken, tokenData, exp: Date.now() + SESSION_TTL.TOKEN_SESSION });
|
appSessionCache.set(browserCacheKey, { token: authData.AccessToken, tokenData, exp: Date.now() + SESSION_TTL.TOKEN_SESSION });
|
||||||
return res.json({ token: authData.AccessToken, ...tokenData });
|
return res.json({ token: authData.AccessToken, ...tokenData });
|
||||||
}
|
}
|
||||||
return ctx.errorResponse(res, 500, '[DC-501] Authentication failed');
|
return errorResponse(res, 500, '[DC-501] Authentication failed');
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('auth', 'Browser token error', { serviceId, error: e.message });
|
log.warn('auth', 'Browser token error', { serviceId, error: e.message });
|
||||||
return ctx.errorResponse(res, 500, e.message);
|
return errorResponse(res, 500, e.message);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check cache first
|
// Check cache first
|
||||||
const cached = appSessionCache.get(serviceId);
|
const cached = appSessionCache.get(serviceId);
|
||||||
if (cached && cached.exp > Date.now()) {
|
if (cached && cached.exp > Date.now()) {
|
||||||
if (cached.failed) return ctx.errorResponse(res, 500, '[DC-501] Login recently failed, retrying in a few minutes');
|
if (cached.failed) return errorResponse(res, 500, '[DC-501] Login recently failed, retrying in a few minutes');
|
||||||
if (cached.token) {
|
if (cached.token) {
|
||||||
const resp = { token: cached.token };
|
const resp = { token: cached.token };
|
||||||
if (cached.tokenData) Object.assign(resp, cached.tokenData);
|
if (cached.tokenData) Object.assign(resp, cached.tokenData);
|
||||||
@@ -172,10 +189,10 @@ module.exports = function(ctx, getAppSession, appSessionCache) {
|
|||||||
return res.json({ cookies: appCookies });
|
return res.json({ cookies: appCookies });
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx.errorResponse(res, 500, '[DC-501] Login failed');
|
errorResponse(res, 500, '[DC-501] Login failed');
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('auth', 'App-token error', { error: e.message });
|
log.warn('auth', 'App-token error', { error: e.message });
|
||||||
ctx.errorResponse(res, 500, e.message);
|
errorResponse(res, 500, e.message);
|
||||||
}
|
}
|
||||||
}, 'auth-app-token'));
|
}, 'auth-app-token'));
|
||||||
|
|
||||||
|
|||||||
@@ -2,11 +2,32 @@ const express = require('express');
|
|||||||
const { renewCSRFToken } = require('../../csrf-protection');
|
const { renewCSRFToken } = require('../../csrf-protection');
|
||||||
const { ValidationError, AuthenticationError } = require('../../errors');
|
const { ValidationError, AuthenticationError } = require('../../errors');
|
||||||
|
|
||||||
module.exports = function(ctx) {
|
/**
|
||||||
|
* Auth TOTP routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.authManager - Auth manager
|
||||||
|
* @param {Object} deps.credentialManager - Credential manager
|
||||||
|
* @param {Object} deps.totpConfig - TOTP configuration
|
||||||
|
* @param {Function} deps.saveTotpConfig - Save TOTP config helper
|
||||||
|
* @param {Object} deps.session - Session context
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Function} deps.errorResponse - Error response helper
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
module.exports = function({ authManager, credentialManager, totpConfig, saveTotpConfig, session, asyncHandler, errorResponse, log }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
|
// Ctx shim for backward compatibility
|
||||||
|
const ctx = {
|
||||||
|
credentialManager,
|
||||||
|
totpConfig,
|
||||||
|
saveTotpConfig,
|
||||||
|
session
|
||||||
|
};
|
||||||
|
|
||||||
// Get current TOTP config (public route)
|
// Get current TOTP config (public route)
|
||||||
router.get('/totp/config', ctx.asyncHandler(async (req, res) => {
|
router.get('/totp/config', asyncHandler(async (req, res) => {
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
config: {
|
config: {
|
||||||
@@ -18,7 +39,7 @@ module.exports = function(ctx) {
|
|||||||
}, 'totp-config-get'));
|
}, 'totp-config-get'));
|
||||||
|
|
||||||
// Generate new TOTP secret + QR code
|
// Generate new TOTP secret + QR code
|
||||||
router.post('/totp/setup', ctx.asyncHandler(async (req, res) => {
|
router.post('/totp/setup', asyncHandler(async (req, res) => {
|
||||||
const { authenticator } = require('otplib');
|
const { authenticator } = require('otplib');
|
||||||
const QRCode = require('qrcode');
|
const QRCode = require('qrcode');
|
||||||
|
|
||||||
@@ -46,7 +67,7 @@ module.exports = function(ctx) {
|
|||||||
}, 'totp-setup'));
|
}, 'totp-setup'));
|
||||||
|
|
||||||
// Verify first code to confirm setup, then activate TOTP
|
// Verify first code to confirm setup, then activate TOTP
|
||||||
router.post('/totp/verify-setup', ctx.asyncHandler(async (req, res) => {
|
router.post('/totp/verify-setup', asyncHandler(async (req, res) => {
|
||||||
const { authenticator } = require('otplib');
|
const { authenticator } = require('otplib');
|
||||||
const { code } = req.body;
|
const { code } = req.body;
|
||||||
|
|
||||||
@@ -83,7 +104,7 @@ module.exports = function(ctx) {
|
|||||||
}, 'totp-verify-setup'));
|
}, 'totp-verify-setup'));
|
||||||
|
|
||||||
// Login: verify TOTP code and set session cookie
|
// Login: verify TOTP code and set session cookie
|
||||||
router.post('/totp/verify', ctx.asyncHandler(async (req, res) => {
|
router.post('/totp/verify', asyncHandler(async (req, res) => {
|
||||||
const { authenticator } = require('otplib');
|
const { authenticator } = require('otplib');
|
||||||
const { code } = req.body;
|
const { code } = req.body;
|
||||||
|
|
||||||
@@ -105,19 +126,19 @@ module.exports = function(ctx) {
|
|||||||
throw new AuthenticationError('[DC-111] Invalid code');
|
throw new AuthenticationError('[DC-111] Invalid code');
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx.log.info('auth', 'TOTP verified, creating session', { ip: ctx.session.getClientIP(req), duration: ctx.totpConfig.sessionDuration });
|
log.info('auth', 'TOTP verified, creating session', { ip: ctx.session.getClientIP(req), duration: ctx.totpConfig.sessionDuration });
|
||||||
ctx.session.create(req, ctx.totpConfig.sessionDuration);
|
ctx.session.create(req, ctx.totpConfig.sessionDuration);
|
||||||
ctx.session.setCookie(res, ctx.totpConfig.sessionDuration);
|
ctx.session.setCookie(res, ctx.totpConfig.sessionDuration);
|
||||||
|
|
||||||
// Rotate CSRF token for the new session
|
// Rotate CSRF token for the new session
|
||||||
const newCsrfToken = renewCSRFToken(res, req.secure || req.protocol === 'https');
|
const newCsrfToken = renewCSRFToken(res, req.secure || req.protocol === 'https');
|
||||||
|
|
||||||
ctx.log.debug('auth', 'Session created', { sessions: ctx.session.ipSessions.size });
|
log.debug('auth', 'Session created', { sessions: ctx.session.ipSessions.size });
|
||||||
res.json({ success: true, message: 'Authenticated successfully', sessionDuration: ctx.totpConfig.sessionDuration, csrfToken: newCsrfToken });
|
res.json({ success: true, message: 'Authenticated successfully', sessionDuration: ctx.totpConfig.sessionDuration, csrfToken: newCsrfToken });
|
||||||
}, 'totp-verify'));
|
}, 'totp-verify'));
|
||||||
|
|
||||||
// Check session validity (used by Caddy forward_auth)
|
// Check session validity (used by Caddy forward_auth)
|
||||||
router.get('/totp/check-session', ctx.asyncHandler(async (req, res) => {
|
router.get('/totp/check-session', asyncHandler(async (req, res) => {
|
||||||
// Never cache session checks — stale cached 200s cause auth loops
|
// Never cache session checks — stale cached 200s cause auth loops
|
||||||
res.setHeader('Cache-Control', 'no-store, no-cache, must-revalidate');
|
res.setHeader('Cache-Control', 'no-store, no-cache, must-revalidate');
|
||||||
res.setHeader('Pragma', 'no-cache');
|
res.setHeader('Pragma', 'no-cache');
|
||||||
@@ -127,7 +148,7 @@ module.exports = function(ctx) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const valid = ctx.session.isValid(req);
|
const valid = ctx.session.isValid(req);
|
||||||
ctx.log.debug('auth', 'Session check', { ip: ctx.session.getClientIP(req), valid, sessions: ctx.session.ipSessions.size });
|
log.debug('auth', 'Session check', { ip: ctx.session.getClientIP(req), valid, sessions: ctx.session.ipSessions.size });
|
||||||
if (valid) {
|
if (valid) {
|
||||||
return res.status(200).json({ authenticated: true });
|
return res.status(200).json({ authenticated: true });
|
||||||
}
|
}
|
||||||
@@ -136,7 +157,7 @@ module.exports = function(ctx) {
|
|||||||
}, 'totp-check-session'));
|
}, 'totp-check-session'));
|
||||||
|
|
||||||
// Disable TOTP
|
// Disable TOTP
|
||||||
router.post('/totp/disable', ctx.asyncHandler(async (req, res) => {
|
router.post('/totp/disable', asyncHandler(async (req, res) => {
|
||||||
const { code } = req.body;
|
const { code } = req.body;
|
||||||
|
|
||||||
// Always require a valid TOTP code when TOTP is active
|
// Always require a valid TOTP code when TOTP is active
|
||||||
@@ -169,10 +190,10 @@ module.exports = function(ctx) {
|
|||||||
}, 'totp-disable'));
|
}, 'totp-disable'));
|
||||||
|
|
||||||
// Update TOTP settings (session duration)
|
// Update TOTP settings (session duration)
|
||||||
router.post('/totp/config', ctx.asyncHandler(async (req, res) => {
|
router.post('/totp/config', asyncHandler(async (req, res) => {
|
||||||
const { sessionDuration } = req.body;
|
const { sessionDuration } = req.body;
|
||||||
|
|
||||||
if (sessionDuration && !ctx.session.durations.hasOwnProperty(sessionDuration)) {
|
if (sessionDuration && !Object.prototype.hasOwnProperty.call(ctx.session.durations, sessionDuration)) {
|
||||||
throw new ValidationError(`Invalid session duration. Valid options: ${Object.keys(ctx.session.durations).join(', ')}`, 'sessionDuration');
|
throw new ValidationError(`Invalid session duration. Valid options: ${Object.keys(ctx.session.durations).join(', ')}`, 'sessionDuration');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -4,8 +4,18 @@ const fsp = require('fs').promises;
|
|||||||
const path = require('path');
|
const path = require('path');
|
||||||
const { exists, isAccessible } = require('../fs-helpers');
|
const { exists, isAccessible } = require('../fs-helpers');
|
||||||
const { paginate, parsePaginationParams } = require('../pagination');
|
const { paginate, parsePaginationParams } = require('../pagination');
|
||||||
|
const { ValidationError } = require('../errors');
|
||||||
|
|
||||||
module.exports = function(ctx) {
|
/**
|
||||||
|
* Browse route factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Function} deps.validateSecurePath - Path traversal validator
|
||||||
|
* @param {Object} deps.auditLogger - Audit logger
|
||||||
|
* @param {Object} deps.docker - Docker client
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
module.exports = function({ asyncHandler, validateSecurePath, auditLogger, docker }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
// Parse browse roots from environment
|
// Parse browse roots from environment
|
||||||
@@ -20,7 +30,7 @@ module.exports = function(ctx) {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Get available browse roots
|
// Get available browse roots
|
||||||
router.get('/browse/roots', ctx.asyncHandler(async (req, res) => {
|
router.get('/browse/roots', asyncHandler(async (req, res) => {
|
||||||
const allRoots = BROWSE_ROOTS.map(r => ({
|
const allRoots = BROWSE_ROOTS.map(r => ({
|
||||||
name: r.hostPath,
|
name: r.hostPath,
|
||||||
path: r.hostPath,
|
path: r.hostPath,
|
||||||
@@ -38,7 +48,7 @@ module.exports = function(ctx) {
|
|||||||
}, 'browse-roots'));
|
}, 'browse-roots'));
|
||||||
|
|
||||||
// Browse directory contents
|
// Browse directory contents
|
||||||
router.get('/browse/directories', ctx.asyncHandler(async (req, res) => {
|
router.get('/browse/directories', asyncHandler(async (req, res) => {
|
||||||
const requestedPath = req.query.path || '';
|
const requestedPath = req.query.path || '';
|
||||||
|
|
||||||
if (!requestedPath) {
|
if (!requestedPath) {
|
||||||
@@ -62,7 +72,7 @@ module.exports = function(ctx) {
|
|||||||
);
|
);
|
||||||
|
|
||||||
if (!matchingRoot) {
|
if (!matchingRoot) {
|
||||||
return ctx.errorResponse(res, 400, 'Path not in browseable roots', {
|
throw new ValidationError('Path not in browseable roots', {
|
||||||
availableRoots: BROWSE_ROOTS.map(r => r.hostPath)
|
availableRoots: BROWSE_ROOTS.map(r => r.hostPath)
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -73,10 +83,10 @@ module.exports = function(ctx) {
|
|||||||
const allowedRoots = BROWSE_ROOTS.map(r => r.containerPath);
|
const allowedRoots = BROWSE_ROOTS.map(r => r.containerPath);
|
||||||
let resolvedPath;
|
let resolvedPath;
|
||||||
try {
|
try {
|
||||||
resolvedPath = await ctx.validateSecurePath(containerFullPath, allowedRoots, ctx.auditLogger);
|
resolvedPath = await validateSecurePath(containerFullPath, allowedRoots, auditLogger);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
if (error.constructor.name === 'ValidationError') {
|
if (error.constructor.name === 'ValidationError') {
|
||||||
ctx.auditLogger.logSecurityEvent('path_traversal_attempt', {
|
auditLogger.logSecurityEvent('path_traversal_attempt', {
|
||||||
requestedPath, containerFullPath, allowedRoots,
|
requestedPath, containerFullPath, allowedRoots,
|
||||||
error: error.message,
|
error: error.message,
|
||||||
ip: req.ip,
|
ip: req.ip,
|
||||||
@@ -124,7 +134,7 @@ module.exports = function(ctx) {
|
|||||||
}, 'browse-dir'));
|
}, 'browse-dir'));
|
||||||
|
|
||||||
// Detect media mounts from existing media server containers
|
// Detect media mounts from existing media server containers
|
||||||
router.get('/media/detected-mounts', ctx.asyncHandler(async (req, res) => {
|
router.get('/media/detected-mounts', asyncHandler(async (req, res) => {
|
||||||
const mediaServerPatterns = [
|
const mediaServerPatterns = [
|
||||||
'plex', 'jellyfin', 'emby', 'kodi', 'navidrome', 'airsonic',
|
'plex', 'jellyfin', 'emby', 'kodi', 'navidrome', 'airsonic',
|
||||||
'subsonic', 'funkwhale', 'beets', 'lidarr', 'sonarr', 'radarr',
|
'subsonic', 'funkwhale', 'beets', 'lidarr', 'sonarr', 'radarr',
|
||||||
@@ -136,7 +146,7 @@ module.exports = function(ctx) {
|
|||||||
'/tmp', '/var', '/etc', '/opt', '/root', '/home', '/.', '/caddyfile'
|
'/tmp', '/var', '/etc', '/opt', '/root', '/home', '/.', '/caddyfile'
|
||||||
];
|
];
|
||||||
|
|
||||||
const containers = await ctx.docker.client.listContainers({ all: false });
|
const containers = await docker.client.listContainers({ all: false });
|
||||||
const detectedMounts = [];
|
const detectedMounts = [];
|
||||||
const seenPaths = new Set();
|
const seenPaths = new Set();
|
||||||
|
|
||||||
@@ -145,7 +155,7 @@ module.exports = function(ctx) {
|
|||||||
const isMediaServer = mediaServerPatterns.some(p => imageName.includes(p));
|
const isMediaServer = mediaServerPatterns.some(p => imageName.includes(p));
|
||||||
if (!isMediaServer) continue;
|
if (!isMediaServer) continue;
|
||||||
|
|
||||||
const container = ctx.docker.client.getContainer(containerInfo.Id);
|
const container = docker.client.getContainer(containerInfo.Id);
|
||||||
const details = await container.inspect();
|
const details = await container.inspect();
|
||||||
const binds = details.HostConfig?.Binds || [];
|
const binds = details.HostConfig?.Binds || [];
|
||||||
|
|
||||||
|
|||||||
@@ -3,7 +3,15 @@ const fsp = require('fs').promises;
|
|||||||
const path = require('path');
|
const path = require('path');
|
||||||
const { LIMITS } = require('../../constants');
|
const { LIMITS } = require('../../constants');
|
||||||
const { exists } = require('../../fs-helpers');
|
const { exists } = require('../../fs-helpers');
|
||||||
const { ValidationError } = require('../errors');
|
const { ValidationError } = require('../../errors');
|
||||||
|
/**
|
||||||
|
* Config assets routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.servicesStateManager - Services state manager
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
|
||||||
// Image processing for favicon conversion (optional)
|
// Image processing for favicon conversion (optional)
|
||||||
let sharp, pngToIco;
|
let sharp, pngToIco;
|
||||||
@@ -14,12 +22,12 @@ try {
|
|||||||
// Image processing libraries not available — favicon conversion disabled
|
// Image processing libraries not available — favicon conversion disabled
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = function(ctx) {
|
module.exports = function({ servicesStateManager, asyncHandler, log }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
// ===== ASSET UPLOAD =====
|
// ===== ASSET UPLOAD =====
|
||||||
|
|
||||||
router.post('/assets/upload', express.json({ limit: LIMITS.BODY_UPLOAD }), ctx.asyncHandler(async (req, res) => {
|
router.post('/assets/upload', express.json({ limit: LIMITS.BODY_UPLOAD }), asyncHandler(async (req, res) => {
|
||||||
const { filename, data } = req.body;
|
const { filename, data } = req.body;
|
||||||
|
|
||||||
if (!filename || !data) {
|
if (!filename || !data) {
|
||||||
@@ -65,7 +73,7 @@ module.exports = function(ctx) {
|
|||||||
// Manage custom dashboard logo
|
// Manage custom dashboard logo
|
||||||
|
|
||||||
// Get current logo path, position, and title
|
// Get current logo path, position, and title
|
||||||
router.get('/logo', ctx.asyncHandler(async (req, res) => {
|
router.get('/logo', asyncHandler(async (req, res) => {
|
||||||
const config = await ctx.readConfig();
|
const config = await ctx.readConfig();
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@@ -100,7 +108,7 @@ module.exports = function(ctx) {
|
|||||||
|
|
||||||
// Upload custom logo(s) and/or update position and title
|
// Upload custom logo(s) and/or update position and title
|
||||||
// Supports: dataDark/dataLight (separate variants) or data (single logo for both)
|
// Supports: dataDark/dataLight (separate variants) or data (single logo for both)
|
||||||
router.post('/logo', express.json({ limit: LIMITS.BODY_UPLOAD }), ctx.asyncHandler(async (req, res) => {
|
router.post('/logo', express.json({ limit: LIMITS.BODY_UPLOAD }), asyncHandler(async (req, res) => {
|
||||||
const { data, dataDark, dataLight, position, dashboardTitle } = req.body;
|
const { data, dataDark, dataLight, position, dashboardTitle } = req.body;
|
||||||
|
|
||||||
if (!data && !dataDark && !dataLight && !position && !dashboardTitle) {
|
if (!data && !dataDark && !dataLight && !position && !dashboardTitle) {
|
||||||
@@ -159,7 +167,7 @@ module.exports = function(ctx) {
|
|||||||
}, 'logo-upload'));
|
}, 'logo-upload'));
|
||||||
|
|
||||||
// Reset all branding to defaults
|
// Reset all branding to defaults
|
||||||
router.delete('/logo', ctx.asyncHandler(async (req, res) => {
|
router.delete('/logo', asyncHandler(async (req, res) => {
|
||||||
const config = await ctx.readConfig();
|
const config = await ctx.readConfig();
|
||||||
const assetsPath = process.env.ASSETS_PATH || '/app/assets';
|
const assetsPath = process.env.ASSETS_PATH || '/app/assets';
|
||||||
|
|
||||||
@@ -195,7 +203,7 @@ module.exports = function(ctx) {
|
|||||||
// Upload and convert favicon (PNG/SVG to ICO)
|
// Upload and convert favicon (PNG/SVG to ICO)
|
||||||
|
|
||||||
// Get current favicon
|
// Get current favicon
|
||||||
router.get('/favicon', ctx.asyncHandler(async (req, res) => {
|
router.get('/favicon', asyncHandler(async (req, res) => {
|
||||||
const config = await ctx.readConfig();
|
const config = await ctx.readConfig();
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@@ -205,7 +213,7 @@ module.exports = function(ctx) {
|
|||||||
}, 'favicon-get'));
|
}, 'favicon-get'));
|
||||||
|
|
||||||
// Upload and convert favicon
|
// Upload and convert favicon
|
||||||
router.post('/favicon', ctx.asyncHandler(async (req, res) => {
|
router.post('/favicon', asyncHandler(async (req, res) => {
|
||||||
const { data } = req.body;
|
const { data } = req.body;
|
||||||
|
|
||||||
if (!data) {
|
if (!data) {
|
||||||
@@ -267,7 +275,7 @@ module.exports = function(ctx) {
|
|||||||
}, 'favicon'));
|
}, 'favicon'));
|
||||||
|
|
||||||
// Reset favicon to default
|
// Reset favicon to default
|
||||||
router.delete('/favicon', ctx.asyncHandler(async (req, res) => {
|
router.delete('/favicon', asyncHandler(async (req, res) => {
|
||||||
const config = await ctx.readConfig();
|
const config = await ctx.readConfig();
|
||||||
|
|
||||||
// Delete custom favicon files
|
// Delete custom favicon files
|
||||||
|
|||||||
@@ -3,13 +3,31 @@ const fs = require('fs');
|
|||||||
const path = require('path');
|
const path = require('path');
|
||||||
const { CADDY } = require('../../constants');
|
const { CADDY } = require('../../constants');
|
||||||
const { exists } = require('../../fs-helpers');
|
const { exists } = require('../../fs-helpers');
|
||||||
const { ValidationError, AuthenticationError } = require('../errors');
|
const { ValidationError, AuthenticationError } = require('../../errors');
|
||||||
|
|
||||||
module.exports = function(ctx) {
|
/**
|
||||||
|
* Config backup routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
module.exports = function(deps) {
|
||||||
const express = require('express');
|
const express = require('express');
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
const THEMES_DIR = process.env.THEMES_DIR || path.join(path.dirname(ctx.SERVICES_FILE), 'themes');
|
// Extract dependencies
|
||||||
|
const {
|
||||||
|
configStateManager, servicesStateManager, asyncHandler, log,
|
||||||
|
SERVICES_FILE, CONFIG_FILE, TOTP_CONFIG_FILE, TAILSCALE_CONFIG_FILE, NOTIFICATIONS_FILE,
|
||||||
|
caddy, dns, fetchT, totpConfig, credentialManager, loadSiteConfig, loadNotificationConfig, session, saveTotpConfig
|
||||||
|
} = deps;
|
||||||
|
|
||||||
|
// Create ctx-like object for compatibility with existing code
|
||||||
|
const ctx = {
|
||||||
|
SERVICES_FILE, CONFIG_FILE, TOTP_CONFIG_FILE, TAILSCALE_CONFIG_FILE, NOTIFICATIONS_FILE,
|
||||||
|
caddy, dns, fetchT, totpConfig, credentialManager, loadSiteConfig, loadNotificationConfig
|
||||||
|
};
|
||||||
|
|
||||||
|
const THEMES_DIR = process.env.THEMES_DIR || path.join(path.dirname(SERVICES_FILE), 'themes');
|
||||||
|
|
||||||
function readAllThemes() {
|
function readAllThemes() {
|
||||||
const themes = {};
|
const themes = {};
|
||||||
@@ -28,7 +46,7 @@ module.exports = function(ctx) {
|
|||||||
// Unified v2.0 backup — server config + encryption key + themes (browser state added client-side)
|
// Unified v2.0 backup — server config + encryption key + themes (browser state added client-side)
|
||||||
|
|
||||||
// Export all configuration as a downloadable JSON bundle
|
// Export all configuration as a downloadable JSON bundle
|
||||||
router.get('/backup/export', ctx.asyncHandler(async (req, res) => {
|
router.get('/backup/export', asyncHandler(async (req, res) => {
|
||||||
const backup = {
|
const backup = {
|
||||||
version: '2.0',
|
version: '2.0',
|
||||||
exportedAt: new Date().toISOString(),
|
exportedAt: new Date().toISOString(),
|
||||||
@@ -72,7 +90,7 @@ module.exports = function(ctx) {
|
|||||||
backup.files[file.key] = { type: 'missing', data: null };
|
backup.files[file.key] = { type: 'missing', data: null };
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('backup', `Could not backup ${file.key}`, { error: e.message });
|
log.warn('backup', `Could not backup ${file.key}`, { error: e.message });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -91,7 +109,7 @@ module.exports = function(ctx) {
|
|||||||
backup.totp = { qrCode: qrDataUrl, issuer: 'DashCaddy' };
|
backup.totp = { qrCode: qrDataUrl, issuer: 'DashCaddy' };
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('backup', 'Could not include TOTP QR in backup', { error: e.message });
|
log.warn('backup', 'Could not include TOTP QR in backup', { error: e.message });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -110,14 +128,14 @@ module.exports = function(ctx) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('backup', 'Could not include assets in backup', { error: e.message });
|
log.warn('backup', 'Could not include assets in backup', { error: e.message });
|
||||||
}
|
}
|
||||||
|
|
||||||
// Include user-created themes
|
// Include user-created themes
|
||||||
try {
|
try {
|
||||||
backup.themes = readAllThemes();
|
backup.themes = readAllThemes();
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('backup', 'Could not include themes in backup', { error: e.message });
|
log.warn('backup', 'Could not include themes in backup', { error: e.message });
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set headers for file download
|
// Set headers for file download
|
||||||
@@ -126,11 +144,11 @@ module.exports = function(ctx) {
|
|||||||
res.setHeader('Content-Disposition', `attachment; filename="${backupFilename}"`);
|
res.setHeader('Content-Disposition', `attachment; filename="${backupFilename}"`);
|
||||||
|
|
||||||
res.json(backup);
|
res.json(backup);
|
||||||
ctx.log.info('backup', 'Backup exported successfully');
|
log.info('backup', 'Backup exported successfully');
|
||||||
}, 'backup-export'));
|
}, 'backup-export'));
|
||||||
|
|
||||||
// Preview what will be restored (without making changes)
|
// Preview what will be restored (without making changes)
|
||||||
router.post('/backup/preview', ctx.asyncHandler(async (req, res) => {
|
router.post('/backup/preview', asyncHandler(async (req, res) => {
|
||||||
const backup = req.body;
|
const backup = req.body;
|
||||||
|
|
||||||
if (!backup || !backup.version || !backup.files) {
|
if (!backup || !backup.version || !backup.files) {
|
||||||
@@ -195,7 +213,7 @@ module.exports = function(ctx) {
|
|||||||
}, 'backup-preview'));
|
}, 'backup-preview'));
|
||||||
|
|
||||||
// Restore configuration from backup
|
// Restore configuration from backup
|
||||||
router.post('/backup/restore', ctx.asyncHandler(async (req, res) => {
|
router.post('/backup/restore', asyncHandler(async (req, res) => {
|
||||||
const { backup, options = {}, totpCode } = req.body;
|
const { backup, options = {}, totpCode } = req.body;
|
||||||
|
|
||||||
if (!backup || !backup.version || !backup.files) {
|
if (!backup || !backup.version || !backup.files) {
|
||||||
@@ -274,7 +292,7 @@ module.exports = function(ctx) {
|
|||||||
|
|
||||||
await fsp.writeFile(filePath, content, 'utf8');
|
await fsp.writeFile(filePath, content, 'utf8');
|
||||||
results.restored.push(key);
|
results.restored.push(key);
|
||||||
ctx.log.info('backup', `Restored: ${key}`, { path: filePath });
|
log.info('backup', `Restored: ${key}`, { path: filePath });
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
results.errors.push({ file: key, error: e.message });
|
results.errors.push({ file: key, error: e.message });
|
||||||
}
|
}
|
||||||
@@ -350,7 +368,7 @@ module.exports = function(ctx) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
results.restored.push(`themes:${Object.keys(backup.themes).length}`);
|
results.restored.push(`themes:${Object.keys(backup.themes).length}`);
|
||||||
ctx.log.info('backup', `Restored ${Object.keys(backup.themes).length} themes`);
|
log.info('backup', `Restored ${Object.keys(backup.themes).length} themes`);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
results.errors.push({ file: 'themes', error: e.message });
|
results.errors.push({ file: 'themes', error: e.message });
|
||||||
}
|
}
|
||||||
@@ -366,7 +384,7 @@ module.exports = function(ctx) {
|
|||||||
}
|
}
|
||||||
results.encryptionKeyReloaded = true;
|
results.encryptionKeyReloaded = true;
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('backup', 'Could not reload encryption key', { error: e.message });
|
log.warn('backup', 'Could not reload encryption key', { error: e.message });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -380,7 +398,7 @@ module.exports = function(ctx) {
|
|||||||
results
|
results
|
||||||
});
|
});
|
||||||
|
|
||||||
ctx.log.info('backup', 'Backup restore completed', { restored: results.restored.length, errors: results.errors.length });
|
log.info('backup', 'Backup restore completed', { restored: results.restored.length, errors: results.errors.length });
|
||||||
}, 'backup-restore'));
|
}, 'backup-restore'));
|
||||||
|
|
||||||
return router;
|
return router;
|
||||||
|
|||||||
@@ -1,9 +1,42 @@
|
|||||||
const express = require('express');
|
const express = require('express');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Config routes aggregator
|
||||||
|
* @param {Object} ctx - Application context (for backward compatibility)
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
module.exports = function(ctx) {
|
module.exports = function(ctx) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
router.use(require('./settings')(ctx));
|
|
||||||
router.use(require('./assets')(ctx));
|
// Common deps for all config routes
|
||||||
router.use(require('./backup')(ctx));
|
const baseDeps = {
|
||||||
|
configStateManager: ctx.configStateManager,
|
||||||
|
servicesStateManager: ctx.servicesStateManager,
|
||||||
|
asyncHandler: ctx.asyncHandler,
|
||||||
|
log: ctx.log
|
||||||
|
};
|
||||||
|
|
||||||
|
// Additional deps for backup route
|
||||||
|
const backupDeps = {
|
||||||
|
...baseDeps,
|
||||||
|
SERVICES_FILE: ctx.SERVICES_FILE,
|
||||||
|
CONFIG_FILE: ctx.CONFIG_FILE,
|
||||||
|
TOTP_CONFIG_FILE: ctx.TOTP_CONFIG_FILE,
|
||||||
|
TAILSCALE_CONFIG_FILE: ctx.TAILSCALE_CONFIG_FILE,
|
||||||
|
NOTIFICATIONS_FILE: ctx.NOTIFICATIONS_FILE,
|
||||||
|
caddy: ctx.caddy,
|
||||||
|
dns: ctx.dns,
|
||||||
|
fetchT: ctx.fetchT,
|
||||||
|
totpConfig: ctx.totpConfig,
|
||||||
|
credentialManager: ctx.credentialManager,
|
||||||
|
loadSiteConfig: ctx.loadSiteConfig,
|
||||||
|
loadNotificationConfig: ctx.loadNotificationConfig,
|
||||||
|
session: ctx.session,
|
||||||
|
saveTotpConfig: ctx.saveTotpConfig
|
||||||
|
};
|
||||||
|
|
||||||
|
router.use(require('./settings')(baseDeps));
|
||||||
|
router.use(require('./assets')({ ...baseDeps, CONFIG_FILE: ctx.CONFIG_FILE, readConfig: ctx.readConfig, saveConfig: ctx.saveConfig, errorResponse: ctx.errorResponse }));
|
||||||
|
router.use(require('./backup')(backupDeps));
|
||||||
return router;
|
return router;
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,16 +1,24 @@
|
|||||||
const fsp = require('fs').promises;
|
const fsp = require('fs').promises;
|
||||||
const { validateConfig } = require('../../config-schema');
|
const { validateConfig } = require('../../config-schema');
|
||||||
const { exists } = require('../../fs-helpers');
|
const { exists } = require('../../fs-helpers');
|
||||||
const { ValidationError } = require('../errors');
|
const { ValidationError } = require('../../errors');
|
||||||
|
|
||||||
module.exports = function(ctx) {
|
module.exports = function({ configStateManager, asyncHandler, log }) {
|
||||||
|
/**
|
||||||
|
* Config settings routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.configStateManager - Config state manager
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
const express = require('express');
|
const express = require('express');
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
// ===== DASHCADDY CONFIG ENDPOINTS =====
|
// ===== DASHCADDY CONFIG ENDPOINTS =====
|
||||||
// Server-side config storage for setup wizard (shared across all browsers/machines)
|
// Server-side config storage for setup wizard (shared across all browsers/machines)
|
||||||
|
|
||||||
router.get('/config', ctx.asyncHandler(async (req, res) => {
|
router.get('/config', asyncHandler(async (req, res) => {
|
||||||
if (!await exists(ctx.CONFIG_FILE)) {
|
if (!await exists(ctx.CONFIG_FILE)) {
|
||||||
return res.json({ setupComplete: false });
|
return res.json({ setupComplete: false });
|
||||||
}
|
}
|
||||||
@@ -19,7 +27,7 @@ module.exports = function(ctx) {
|
|||||||
res.json(config);
|
res.json(config);
|
||||||
}, 'config-get'));
|
}, 'config-get'));
|
||||||
|
|
||||||
router.post('/config', ctx.asyncHandler(async (req, res) => {
|
router.post('/config', asyncHandler(async (req, res) => {
|
||||||
const incoming = req.body;
|
const incoming = req.body;
|
||||||
|
|
||||||
if (!incoming || typeof incoming !== 'object') {
|
if (!incoming || typeof incoming !== 'object') {
|
||||||
@@ -55,12 +63,12 @@ module.exports = function(ctx) {
|
|||||||
|
|
||||||
await fsp.writeFile(ctx.CONFIG_FILE, JSON.stringify(config, null, 2), 'utf8');
|
await fsp.writeFile(ctx.CONFIG_FILE, JSON.stringify(config, null, 2), 'utf8');
|
||||||
ctx.loadSiteConfig(); // Refresh in-memory config
|
ctx.loadSiteConfig(); // Refresh in-memory config
|
||||||
ctx.log.info('config', 'Config saved', { path: ctx.CONFIG_FILE });
|
log.info('config', 'Config saved', { path: ctx.CONFIG_FILE });
|
||||||
|
|
||||||
res.json({ success: true, message: 'Configuration saved', config, warnings });
|
res.json({ success: true, message: 'Configuration saved', config, warnings });
|
||||||
}, 'config-save'));
|
}, 'config-save'));
|
||||||
|
|
||||||
router.delete('/config', ctx.asyncHandler(async (req, res) => {
|
router.delete('/config', asyncHandler(async (req, res) => {
|
||||||
if (await exists(ctx.CONFIG_FILE)) {
|
if (await exists(ctx.CONFIG_FILE)) {
|
||||||
await fsp.unlink(ctx.CONFIG_FILE);
|
await fsp.unlink(ctx.CONFIG_FILE);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,13 +4,23 @@ const fsp = require('fs').promises;
|
|||||||
const path = require('path');
|
const path = require('path');
|
||||||
const { exists } = require('../fs-helpers');
|
const { exists } = require('../fs-helpers');
|
||||||
const { paginate, parsePaginationParams } = require('../pagination');
|
const { paginate, parsePaginationParams } = require('../pagination');
|
||||||
|
const { NotFoundError } = require('../errors');
|
||||||
|
|
||||||
module.exports = function(ctx) {
|
/**
|
||||||
|
* Logs route factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Object} deps.docker - Docker client
|
||||||
|
* @param {Object} deps.logDigest - Log digest manager (optional)
|
||||||
|
* @param {Object} deps.dockerMaintenance - Docker maintenance module (optional)
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
module.exports = function({ asyncHandler, docker, logDigest, dockerMaintenance }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
// List containers with logs
|
// List containers with logs
|
||||||
router.get('/logs/containers', ctx.asyncHandler(async (req, res) => {
|
router.get('/logs/containers', asyncHandler(async (req, res) => {
|
||||||
const containers = await ctx.docker.client.listContainers({ all: true });
|
const containers = await docker.client.listContainers({ all: true });
|
||||||
const containerList = containers.map(c => ({
|
const containerList = containers.map(c => ({
|
||||||
id: c.Id.slice(0, 12),
|
id: c.Id.slice(0, 12),
|
||||||
name: c.Names[0]?.replace(/^\//, '') || 'unknown',
|
name: c.Names[0]?.replace(/^\//, '') || 'unknown',
|
||||||
@@ -25,13 +35,13 @@ module.exports = function(ctx) {
|
|||||||
}, 'logs-containers'));
|
}, 'logs-containers'));
|
||||||
|
|
||||||
// Get logs for a specific container
|
// Get logs for a specific container
|
||||||
router.get('/logs/container/:id', ctx.asyncHandler(async (req, res) => {
|
router.get('/logs/container/:id', asyncHandler(async (req, res) => {
|
||||||
const containerId = req.params.id;
|
const containerId = req.params.id;
|
||||||
const tail = parseInt(req.query.tail) || 100;
|
const tail = parseInt(req.query.tail) || 100;
|
||||||
const since = req.query.since || 0;
|
const since = req.query.since || 0;
|
||||||
const timestamps = req.query.timestamps !== 'false';
|
const timestamps = req.query.timestamps !== 'false';
|
||||||
|
|
||||||
const container = ctx.docker.client.getContainer(containerId);
|
const container = docker.client.getContainer(containerId);
|
||||||
let info;
|
let info;
|
||||||
try {
|
try {
|
||||||
info = await container.inspect();
|
info = await container.inspect();
|
||||||
@@ -80,9 +90,9 @@ module.exports = function(ctx) {
|
|||||||
}, 'logs-container'));
|
}, 'logs-container'));
|
||||||
|
|
||||||
// Stream logs (SSE)
|
// Stream logs (SSE)
|
||||||
router.get('/logs/stream/:id', ctx.asyncHandler(async (req, res) => {
|
router.get('/logs/stream/:id', asyncHandler(async (req, res) => {
|
||||||
const containerId = req.params.id;
|
const containerId = req.params.id;
|
||||||
const container = ctx.docker.client.getContainer(containerId);
|
const container = docker.client.getContainer(containerId);
|
||||||
try {
|
try {
|
||||||
await container.inspect();
|
await container.inspect();
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
@@ -129,7 +139,7 @@ module.exports = function(ctx) {
|
|||||||
});
|
});
|
||||||
|
|
||||||
logStream.on('error', (err) => {
|
logStream.on('error', (err) => {
|
||||||
res.write(`data: ${JSON.stringify({ error: ctx.safeErrorMessage(err) })}\n\n`);
|
res.write(`data: ${JSON.stringify({ error: err.message || String(err) })}\n\n`);
|
||||||
res.end();
|
res.end();
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -139,9 +149,9 @@ module.exports = function(ctx) {
|
|||||||
}, 'logs-stream'));
|
}, 'logs-stream'));
|
||||||
|
|
||||||
// Get latest daily digest
|
// Get latest daily digest
|
||||||
router.get('/logs/digest/latest', ctx.asyncHandler(async (req, res) => {
|
router.get('/logs/digest/latest', asyncHandler(async (req, res) => {
|
||||||
if (!ctx.logDigest) return ctx.errorResponse(res, 503, 'Log digest not available');
|
if (!logDigest) throw new Error('Log digest not available');
|
||||||
const digest = await ctx.logDigest.getLatestDigest();
|
const digest = await logDigest.getLatestDigest();
|
||||||
if (!digest) {
|
if (!digest) {
|
||||||
return res.json({ success: true, digest: null, message: 'No digest available yet. First digest is generated at midnight.' });
|
return res.json({ success: true, digest: null, message: 'No digest available yet. First digest is generated at midnight.' });
|
||||||
}
|
}
|
||||||
@@ -149,63 +159,63 @@ module.exports = function(ctx) {
|
|||||||
}, 'logs-digest-latest'));
|
}, 'logs-digest-latest'));
|
||||||
|
|
||||||
// Get live digest data (today's accumulated stats)
|
// Get live digest data (today's accumulated stats)
|
||||||
router.get('/logs/digest/live', ctx.asyncHandler(async (req, res) => {
|
router.get('/logs/digest/live', asyncHandler(async (req, res) => {
|
||||||
if (!ctx.logDigest) return ctx.errorResponse(res, 503, 'Log digest not available');
|
if (!logDigest) throw new Error('Log digest not available');
|
||||||
const live = ctx.logDigest.getLiveData();
|
const live = logDigest.getLiveData();
|
||||||
res.json({ success: true, ...live });
|
res.json({ success: true, ...live });
|
||||||
}, 'logs-digest-live'));
|
}, 'logs-digest-live'));
|
||||||
|
|
||||||
// List available digest dates
|
// List available digest dates
|
||||||
router.get('/logs/digest/history', ctx.asyncHandler(async (req, res) => {
|
router.get('/logs/digest/history', asyncHandler(async (req, res) => {
|
||||||
if (!ctx.logDigest) return ctx.errorResponse(res, 503, 'Log digest not available');
|
if (!logDigest) throw new Error('Log digest not available');
|
||||||
const dates = await ctx.logDigest.listDigests();
|
const dates = await logDigest.listDigests();
|
||||||
res.json({ success: true, dates });
|
res.json({ success: true, dates });
|
||||||
}, 'logs-digest-history'));
|
}, 'logs-digest-history'));
|
||||||
|
|
||||||
// Generate digest on demand (for today or a specific date)
|
// Generate digest on demand (for today or a specific date)
|
||||||
router.post('/logs/digest/generate', ctx.asyncHandler(async (req, res) => {
|
router.post('/logs/digest/generate', asyncHandler(async (req, res) => {
|
||||||
if (!ctx.logDigest) return ctx.errorResponse(res, 503, 'Log digest not available');
|
if (!logDigest) throw new Error('Log digest not available');
|
||||||
const date = req.body.date || new Date().toISOString().slice(0, 10);
|
const date = req.body.date || new Date().toISOString().slice(0, 10);
|
||||||
const digest = await ctx.logDigest.generateDailyDigest(date);
|
const digest = await logDigest.generateDailyDigest(date);
|
||||||
res.json({ success: true, digest });
|
res.json({ success: true, digest });
|
||||||
}, 'logs-digest-generate'));
|
}, 'logs-digest-generate'));
|
||||||
|
|
||||||
// Get digest for a specific date (JSON)
|
// Get digest for a specific date (JSON)
|
||||||
router.get('/logs/digest/:date', ctx.asyncHandler(async (req, res) => {
|
router.get('/logs/digest/:date', asyncHandler(async (req, res) => {
|
||||||
if (!ctx.logDigest) return ctx.errorResponse(res, 503, 'Log digest not available');
|
if (!logDigest) throw new Error('Log digest not available');
|
||||||
const { date } = req.params;
|
const { date } = req.params;
|
||||||
if (!/^\d{4}-\d{2}-\d{2}$/.test(date)) {
|
if (!/^\d{4}-\d{2}-\d{2}$/.test(date)) {
|
||||||
throw new ValidationError('Invalid date format. Use YYYY-MM-DD.');
|
throw new ValidationError('Invalid date format. Use YYYY-MM-DD.');
|
||||||
}
|
}
|
||||||
const format = req.query.format || 'json';
|
const format = req.query.format || 'json';
|
||||||
if (format === 'text') {
|
if (format === 'text') {
|
||||||
const text = await ctx.logDigest.getDigestText(date);
|
const text = await logDigest.getDigestText(date);
|
||||||
if (!text) return ctx.errorResponse(res, 404, `No digest found for ${date}`);
|
if (!text) throw new NotFoundError(`Digest for ${date}`);
|
||||||
res.setHeader('Content-Type', 'text/plain');
|
res.setHeader('Content-Type', 'text/plain');
|
||||||
return res.send(text);
|
return res.send(text);
|
||||||
}
|
}
|
||||||
const digest = await ctx.logDigest.getDigestByDate(date);
|
const digest = await logDigest.getDigestByDate(date);
|
||||||
if (!digest) return ctx.errorResponse(res, 404, `No digest found for ${date}`);
|
if (!digest) throw new NotFoundError(`Digest for ${date}`);
|
||||||
res.json({ success: true, digest });
|
res.json({ success: true, digest });
|
||||||
}, 'logs-digest-date'));
|
}, 'logs-digest-date'));
|
||||||
|
|
||||||
// Get Docker disk usage snapshot
|
// Get Docker disk usage snapshot
|
||||||
router.get('/logs/docker-disk', ctx.asyncHandler(async (req, res) => {
|
router.get('/logs/docker-disk', asyncHandler(async (req, res) => {
|
||||||
if (!ctx.dockerMaintenance) return ctx.errorResponse(res, 503, 'Docker maintenance not available');
|
if (!dockerMaintenance) throw new Error('Docker maintenance not available');
|
||||||
const diskUsage = await ctx.dockerMaintenance.getDiskUsage();
|
const diskUsage = await dockerMaintenance.getDiskUsage();
|
||||||
const status = ctx.dockerMaintenance.getStatus();
|
const status = dockerMaintenance.getStatus();
|
||||||
res.json({ success: true, diskUsage, maintenance: status });
|
res.json({ success: true, diskUsage, maintenance: status });
|
||||||
}, 'logs-docker-disk'));
|
}, 'logs-docker-disk'));
|
||||||
|
|
||||||
// Trigger Docker maintenance manually
|
// Trigger Docker maintenance manually
|
||||||
router.post('/logs/docker-maintenance', ctx.asyncHandler(async (req, res) => {
|
router.post('/logs/docker-maintenance', asyncHandler(async (req, res) => {
|
||||||
if (!ctx.dockerMaintenance) return ctx.errorResponse(res, 503, 'Docker maintenance not available');
|
if (!dockerMaintenance) throw new Error('Docker maintenance not available');
|
||||||
const result = await ctx.dockerMaintenance.runMaintenance();
|
const result = await dockerMaintenance.runMaintenance();
|
||||||
res.json({ success: true, result });
|
res.json({ success: true, result });
|
||||||
}, 'logs-docker-maintenance'));
|
}, 'logs-docker-maintenance'));
|
||||||
|
|
||||||
// Get logs from a file path (for native applications)
|
// Get logs from a file path (for native applications)
|
||||||
router.get('/logs/file', ctx.asyncHandler(async (req, res) => {
|
router.get('/logs/file', asyncHandler(async (req, res) => {
|
||||||
const { path: logPath, tail = 100 } = req.query;
|
const { path: logPath, tail = 100 } = req.query;
|
||||||
|
|
||||||
if (!logPath) {
|
if (!logPath) {
|
||||||
|
|||||||
@@ -3,12 +3,19 @@ const { validateURL, validateToken } = require('../input-validator');
|
|||||||
const { paginate, parsePaginationParams } = require('../pagination');
|
const { paginate, parsePaginationParams } = require('../pagination');
|
||||||
const { ValidationError } = require('../errors');
|
const { ValidationError } = require('../errors');
|
||||||
|
|
||||||
module.exports = function(ctx) {
|
/**
|
||||||
|
* Notifications route factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.notification - Notification manager
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
module.exports = function({ notification, asyncHandler }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
// GET /config — Get notification configuration (sensitive data redacted)
|
// GET /config — Get notification configuration (sensitive data redacted)
|
||||||
router.get('/config', ctx.asyncHandler(async (req, res) => {
|
router.get('/config', asyncHandler(async (req, res) => {
|
||||||
const notificationConfig = ctx.notification.getConfig();
|
const notificationConfig = notification.getConfig();
|
||||||
// Return config without sensitive data
|
// Return config without sensitive data
|
||||||
const safeConfig = {
|
const safeConfig = {
|
||||||
enabled: notificationConfig.enabled,
|
enabled: notificationConfig.enabled,
|
||||||
@@ -34,9 +41,9 @@ module.exports = function(ctx) {
|
|||||||
}, 'notifications-config-get'));
|
}, 'notifications-config-get'));
|
||||||
|
|
||||||
// POST /config — Update notification configuration
|
// POST /config — Update notification configuration
|
||||||
router.post('/config', ctx.asyncHandler(async (req, res) => {
|
router.post('/config', asyncHandler(async (req, res) => {
|
||||||
const { enabled, providers, events, healthCheck } = req.body;
|
const { enabled, providers, events, healthCheck } = req.body;
|
||||||
const notificationConfig = ctx.notification.getConfig();
|
const notificationConfig = notification.getConfig();
|
||||||
|
|
||||||
// Validate provider webhook URLs and tokens
|
// Validate provider webhook URLs and tokens
|
||||||
if (providers) {
|
if (providers) {
|
||||||
@@ -109,19 +116,19 @@ module.exports = function(ctx) {
|
|||||||
// Restart daemon if settings changed
|
// Restart daemon if settings changed
|
||||||
if (healthCheck.enabled !== wasEnabled || healthCheck.intervalMinutes) {
|
if (healthCheck.enabled !== wasEnabled || healthCheck.intervalMinutes) {
|
||||||
if (notificationConfig.healthCheck.enabled) {
|
if (notificationConfig.healthCheck.enabled) {
|
||||||
ctx.notification.startHealthDaemon();
|
notification.startHealthDaemon();
|
||||||
} else {
|
} else {
|
||||||
ctx.notification.stopHealthDaemon();
|
notification.stopHealthDaemon();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
await ctx.notification.saveConfig();
|
await notification.saveConfig();
|
||||||
res.json({ success: true, message: 'Notification config updated' });
|
res.json({ success: true, message: 'Notification config updated' });
|
||||||
}, 'notifications-config-update'));
|
}, 'notifications-config-update'));
|
||||||
|
|
||||||
// POST /test — Test notification delivery
|
// POST /test — Test notification delivery
|
||||||
router.post('/test', ctx.asyncHandler(async (req, res) => {
|
router.post('/test', asyncHandler(async (req, res) => {
|
||||||
const { provider } = req.body;
|
const { provider } = req.body;
|
||||||
|
|
||||||
if (provider) {
|
if (provider) {
|
||||||
@@ -129,13 +136,13 @@ module.exports = function(ctx) {
|
|||||||
let result;
|
let result;
|
||||||
switch (provider) {
|
switch (provider) {
|
||||||
case 'discord':
|
case 'discord':
|
||||||
result = await ctx.notification.sendDiscord('Test Notification', 'This is a test notification from DashCaddy.', 'info');
|
result = await notification.sendDiscord('Test Notification', 'This is a test notification from DashCaddy.', 'info');
|
||||||
break;
|
break;
|
||||||
case 'telegram':
|
case 'telegram':
|
||||||
result = await ctx.notification.sendTelegram('Test Notification', 'This is a test notification from DashCaddy.', 'info');
|
result = await notification.sendTelegram('Test Notification', 'This is a test notification from DashCaddy.', 'info');
|
||||||
break;
|
break;
|
||||||
case 'ntfy':
|
case 'ntfy':
|
||||||
result = await ctx.notification.sendNtfy('Test Notification', 'This is a test notification from DashCaddy.', 'info');
|
result = await notification.sendNtfy('Test Notification', 'This is a test notification from DashCaddy.', 'info');
|
||||||
break;
|
break;
|
||||||
default:
|
default:
|
||||||
throw new ValidationError('Unknown provider');
|
throw new ValidationError('Unknown provider');
|
||||||
@@ -143,14 +150,14 @@ module.exports = function(ctx) {
|
|||||||
res.json({ success: result.success, provider, error: result.error });
|
res.json({ success: result.success, provider, error: result.error });
|
||||||
} else {
|
} else {
|
||||||
// Test all enabled providers
|
// Test all enabled providers
|
||||||
const result = await ctx.notification.send('test', 'Test Notification', 'This is a test notification from DashCaddy.', 'info');
|
const result = await notification.send('test', 'Test Notification', 'This is a test notification from DashCaddy.', 'info');
|
||||||
res.json({ success: true, ...result });
|
res.json({ success: true, ...result });
|
||||||
}
|
}
|
||||||
}, 'notifications-test'));
|
}, 'notifications-test'));
|
||||||
|
|
||||||
// GET /history — Get notification history
|
// GET /history — Get notification history
|
||||||
router.get('/history', ctx.asyncHandler(async (req, res) => {
|
router.get('/history', asyncHandler(async (req, res) => {
|
||||||
const notificationHistory = ctx.notification.getHistory();
|
const notificationHistory = notification.getHistory();
|
||||||
const paginationParams = parsePaginationParams(req.query);
|
const paginationParams = parsePaginationParams(req.query);
|
||||||
if (paginationParams) {
|
if (paginationParams) {
|
||||||
const result = paginate(notificationHistory, paginationParams);
|
const result = paginate(notificationHistory, paginationParams);
|
||||||
@@ -166,19 +173,19 @@ module.exports = function(ctx) {
|
|||||||
}, 'notifications-history'));
|
}, 'notifications-history'));
|
||||||
|
|
||||||
// DELETE /history — Clear notification history
|
// DELETE /history — Clear notification history
|
||||||
router.delete('/history', ctx.asyncHandler(async (req, res) => {
|
router.delete('/history', asyncHandler(async (req, res) => {
|
||||||
ctx.notification.clearHistory();
|
notification.clearHistory();
|
||||||
res.json({ success: true, message: 'Notification history cleared' });
|
res.json({ success: true, message: 'Notification history cleared' });
|
||||||
}, 'notifications-history-clear'));
|
}, 'notifications-history-clear'));
|
||||||
|
|
||||||
// POST /health-check — Manually trigger health check
|
// POST /health-check — Manually trigger health check
|
||||||
router.post('/health-check', ctx.asyncHandler(async (req, res) => {
|
router.post('/health-check', asyncHandler(async (req, res) => {
|
||||||
await ctx.notification.checkHealth();
|
await notification.checkHealth();
|
||||||
const notificationConfig = ctx.notification.getConfig();
|
const notificationConfig = notification.getConfig();
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
lastCheck: notificationConfig.healthCheck.lastCheck,
|
lastCheck: notificationConfig.healthCheck.lastCheck,
|
||||||
containersMonitored: Object.keys(ctx.notification.getHealthState()).length
|
containersMonitored: Object.keys(notification.getHealthState()).length
|
||||||
});
|
});
|
||||||
}, 'notifications-health-check'));
|
}, 'notifications-health-check'));
|
||||||
|
|
||||||
|
|||||||
@@ -3,7 +3,18 @@ const { ValidationError } = require('../../errors');
|
|||||||
const crypto = require('crypto');
|
const crypto = require('crypto');
|
||||||
const { DOCKER } = require('../../constants');
|
const { DOCKER } = require('../../constants');
|
||||||
|
|
||||||
module.exports = function(ctx) {
|
/**
|
||||||
|
* Recipes deployment routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.docker - Docker client wrapper
|
||||||
|
* @param {Object} deps.credentialManager - Credential manager
|
||||||
|
* @param {Object} deps.servicesStateManager - Services state manager
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Function} deps.errorResponse - Error response helper
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
module.exports = function({ docker, credentialManager, servicesStateManager, asyncHandler, errorResponse, log }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -12,14 +23,14 @@ module.exports = function(ctx) {
|
|||||||
* POST /api/recipes/deploy
|
* POST /api/recipes/deploy
|
||||||
* Body: { recipeId, config: { selectedComponents, sharedConfig, componentOverrides } }
|
* Body: { recipeId, config: { selectedComponents, sharedConfig, componentOverrides } }
|
||||||
*/
|
*/
|
||||||
router.post('/deploy', ctx.asyncHandler(async (req, res) => {
|
router.post('/deploy', asyncHandler(async (req, res) => {
|
||||||
const { recipeId, config } = req.body;
|
const { recipeId, config } = req.body;
|
||||||
const { RECIPE_TEMPLATES } = require('../../recipe-templates');
|
const { RECIPE_TEMPLATES } = require('../../recipe-templates');
|
||||||
|
|
||||||
const recipe = RECIPE_TEMPLATES[recipeId];
|
const recipe = RECIPE_TEMPLATES[recipeId];
|
||||||
if (!recipe) throw new ValidationError('Invalid recipe template', 'recipeId');
|
if (!recipe) throw new ValidationError('Invalid recipe template', 'recipeId');
|
||||||
|
|
||||||
ctx.log.info('recipe', 'Starting recipe deployment', { recipeId, name: recipe.name });
|
log.info('recipe', 'Starting recipe deployment', { recipeId, name: recipe.name });
|
||||||
|
|
||||||
// Determine which components to deploy
|
// Determine which components to deploy
|
||||||
const selectedIds = new Set(config.selectedComponents || recipe.components.filter(c => c.required).map(c => c.id));
|
const selectedIds = new Set(config.selectedComponents || recipe.components.filter(c => c.required).map(c => c.id));
|
||||||
@@ -40,18 +51,18 @@ module.exports = function(ctx) {
|
|||||||
if (recipe.network) {
|
if (recipe.network) {
|
||||||
networkName = recipe.network.name;
|
networkName = recipe.network.name;
|
||||||
try {
|
try {
|
||||||
await ctx.docker.client.createNetwork({
|
await docker.client.createNetwork({
|
||||||
Name: networkName,
|
Name: networkName,
|
||||||
Driver: recipe.network.driver || 'bridge',
|
Driver: recipe.network.driver || 'bridge',
|
||||||
Labels: { 'sami.managed': 'true', 'sami.recipe': recipeId }
|
Labels: { 'sami.managed': 'true', 'sami.recipe': recipeId }
|
||||||
});
|
});
|
||||||
ctx.log.info('recipe', 'Created Docker network', { networkName });
|
log.info('recipe', 'Created Docker network', { networkName });
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
// Network might already exist
|
// Network might already exist
|
||||||
if (!e.message.includes('already exists')) {
|
if (!e.message.includes('already exists')) {
|
||||||
throw new Error(`Failed to create network ${networkName}: ${e.message}`);
|
throw new Error(`Failed to create network ${networkName}: ${e.message}`);
|
||||||
}
|
}
|
||||||
ctx.log.info('recipe', 'Docker network already exists', { networkName });
|
log.info('recipe', 'Docker network already exists', { networkName });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -61,7 +72,7 @@ module.exports = function(ctx) {
|
|||||||
try {
|
try {
|
||||||
for (const component of componentsToDeploy) {
|
for (const component of componentsToDeploy) {
|
||||||
try {
|
try {
|
||||||
ctx.log.info('recipe', `Deploying component: ${component.id}`, {
|
log.info('recipe', `Deploying component: ${component.id}`, {
|
||||||
role: component.role,
|
role: component.role,
|
||||||
internal: component.internal || false
|
internal: component.internal || false
|
||||||
});
|
});
|
||||||
@@ -69,11 +80,11 @@ module.exports = function(ctx) {
|
|||||||
const result = await deployComponent(component, recipe, config, generatedPasswords, networkName);
|
const result = await deployComponent(component, recipe, config, generatedPasswords, networkName);
|
||||||
deployedComponents.push(result);
|
deployedComponents.push(result);
|
||||||
|
|
||||||
ctx.log.info('recipe', `Component deployed: ${component.id}`, {
|
log.info('recipe', `Component deployed: ${component.id}`, {
|
||||||
containerId: result.containerId?.substring(0, 12)
|
containerId: result.containerId?.substring(0, 12)
|
||||||
});
|
});
|
||||||
} catch (componentError) {
|
} catch (componentError) {
|
||||||
ctx.log.error('recipe', `Component failed: ${component.id}`, {
|
log.error('recipe', `Component failed: ${component.id}`, {
|
||||||
error: componentError.message
|
error: componentError.message
|
||||||
});
|
});
|
||||||
errors.push({ componentId: component.id, role: component.role, error: componentError.message });
|
errors.push({ componentId: component.id, role: component.role, error: componentError.message });
|
||||||
@@ -104,10 +115,10 @@ module.exports = function(ctx) {
|
|||||||
|
|
||||||
// Run auto-connect if available
|
// Run auto-connect if available
|
||||||
if (recipe.autoConnect?.enabled && errors.length === 0) {
|
if (recipe.autoConnect?.enabled && errors.length === 0) {
|
||||||
ctx.log.info('recipe', 'Running auto-connect for recipe', { recipeId });
|
log.info('recipe', 'Running auto-connect for recipe', { recipeId });
|
||||||
// Auto-connect will be handled asynchronously — don't block the response
|
// Auto-connect will be handled asynchronously — don't block the response
|
||||||
runAutoConnect(recipe, deployedComponents, config).catch(e => {
|
runAutoConnect(recipe, deployedComponents, config).catch(e => {
|
||||||
ctx.log.warn('recipe', 'Auto-connect had errors', { recipeId, error: e.message });
|
log.warn('recipe', 'Auto-connect had errors', { recipeId, error: e.message });
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -136,17 +147,17 @@ module.exports = function(ctx) {
|
|||||||
|
|
||||||
res.json(response);
|
res.json(response);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
ctx.log.error('recipe', 'Recipe deployment failed', { recipeId, error: error.message });
|
log.error('recipe', 'Recipe deployment failed', { recipeId, error: error.message });
|
||||||
|
|
||||||
// Cleanup: remove partially deployed containers
|
// Cleanup: remove partially deployed containers
|
||||||
for (const deployed of deployedComponents) {
|
for (const deployed of deployedComponents) {
|
||||||
try {
|
try {
|
||||||
if (deployed.containerId) {
|
if (deployed.containerId) {
|
||||||
const container = ctx.docker.client.getContainer(deployed.containerId);
|
const container = docker.client.getContainer(deployed.containerId);
|
||||||
await container.remove({ force: true });
|
await container.remove({ force: true });
|
||||||
}
|
}
|
||||||
} catch (cleanupError) {
|
} catch (cleanupError) {
|
||||||
ctx.log.warn('recipe', 'Cleanup failed for component', {
|
log.warn('recipe', 'Cleanup failed for component', {
|
||||||
componentId: deployed.id, error: cleanupError.message
|
componentId: deployed.id, error: cleanupError.message
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -155,10 +166,10 @@ module.exports = function(ctx) {
|
|||||||
// Cleanup network
|
// Cleanup network
|
||||||
if (networkName) {
|
if (networkName) {
|
||||||
try {
|
try {
|
||||||
const network = ctx.docker.client.getNetwork(networkName);
|
const network = docker.client.getNetwork(networkName);
|
||||||
await network.remove();
|
await network.remove();
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('recipe', 'Network cleanup failed', { networkName, error: e.message });
|
log.warn('recipe', 'Network cleanup failed', { networkName, error: e.message });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -284,11 +295,11 @@ module.exports = function(ctx) {
|
|||||||
|
|
||||||
// Pull image
|
// Pull image
|
||||||
try {
|
try {
|
||||||
ctx.log.info('recipe', `Pulling image: ${dockerConfig.image}`);
|
log.info('recipe', `Pulling image: ${dockerConfig.image}`);
|
||||||
await ctx.docker.pull(dockerConfig.image);
|
await docker.pull(dockerConfig.image);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('recipe', `Pull failed, checking local: ${dockerConfig.image}`);
|
log.warn('recipe', `Pull failed, checking local: ${dockerConfig.image}`);
|
||||||
const images = await ctx.docker.client.listImages({
|
const images = await docker.client.listImages({
|
||||||
filters: { reference: [dockerConfig.image] }
|
filters: { reference: [dockerConfig.image] }
|
||||||
});
|
});
|
||||||
if (images.length === 0) throw new Error(`Image not found: ${dockerConfig.image}`);
|
if (images.length === 0) throw new Error(`Image not found: ${dockerConfig.image}`);
|
||||||
@@ -296,7 +307,7 @@ module.exports = function(ctx) {
|
|||||||
|
|
||||||
// Remove stale container
|
// Remove stale container
|
||||||
try {
|
try {
|
||||||
const existing = ctx.docker.client.getContainer(containerName);
|
const existing = docker.client.getContainer(containerName);
|
||||||
await existing.inspect();
|
await existing.inspect();
|
||||||
await existing.remove({ force: true });
|
await existing.remove({ force: true });
|
||||||
await new Promise(r => setTimeout(r, 1000));
|
await new Promise(r => setTimeout(r, 1000));
|
||||||
@@ -305,17 +316,17 @@ module.exports = function(ctx) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Create and start container
|
// Create and start container
|
||||||
const container = await ctx.docker.client.createContainer(containerConfig);
|
const container = await docker.client.createContainer(containerConfig);
|
||||||
await container.start();
|
await container.start();
|
||||||
|
|
||||||
// Connect to recipe network
|
// Connect to recipe network
|
||||||
if (networkName) {
|
if (networkName) {
|
||||||
try {
|
try {
|
||||||
const network = ctx.docker.client.getNetwork(networkName);
|
const network = docker.client.getNetwork(networkName);
|
||||||
await network.connect({ Container: container.id });
|
await network.connect({ Container: container.id });
|
||||||
ctx.log.info('recipe', `Connected ${component.id} to network ${networkName}`);
|
log.info('recipe', `Connected ${component.id} to network ${networkName}`);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('recipe', `Failed to connect ${component.id} to network`, { error: e.message });
|
log.warn('recipe', `Failed to connect ${component.id} to network`, { error: e.message });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -332,7 +343,7 @@ module.exports = function(ctx) {
|
|||||||
await helpers.addCaddyConfig(subdomain, caddyConfig);
|
await helpers.addCaddyConfig(subdomain, caddyConfig);
|
||||||
url = `https://${ctx.buildDomain(subdomain)}`;
|
url = `https://${ctx.buildDomain(subdomain)}`;
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('recipe', `Caddy config failed for ${component.id}`, { error: e.message });
|
log.warn('recipe', `Caddy config failed for ${component.id}`, { error: e.message });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -360,12 +371,12 @@ module.exports = function(ctx) {
|
|||||||
|
|
||||||
for (const step of recipe.autoConnect.steps) {
|
for (const step of recipe.autoConnect.steps) {
|
||||||
try {
|
try {
|
||||||
ctx.log.info('recipe', `Auto-connect step: ${step.action}`, { targets: step.targets });
|
log.info('recipe', `Auto-connect step: ${step.action}`, { targets: step.targets });
|
||||||
// These actions map to existing Smart Arr Connect functionality
|
// These actions map to existing Smart Arr Connect functionality
|
||||||
// The actual implementation will be wired when Smart Arr Connect helpers are available
|
// The actual implementation will be wired when Smart Arr Connect helpers are available
|
||||||
ctx.log.info('recipe', `Auto-connect step ${step.action} completed`);
|
log.info('recipe', `Auto-connect step ${step.action} completed`);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('recipe', `Auto-connect step failed: ${step.action}`, { error: e.message });
|
log.warn('recipe', `Auto-connect step failed: ${step.action}`, { error: e.message });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,14 +3,28 @@ const deployRoutes = require('./deploy');
|
|||||||
const manageRoutes = require('./manage');
|
const manageRoutes = require('./manage');
|
||||||
const { NotFoundError } = require('../../errors');
|
const { NotFoundError } = require('../../errors');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Recipes routes aggregator
|
||||||
|
* @param {Object} ctx - Application context (for backward compatibility)
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
module.exports = function(ctx) {
|
module.exports = function(ctx) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
|
const deps = {
|
||||||
|
docker: ctx.docker,
|
||||||
|
credentialManager: ctx.credentialManager,
|
||||||
|
servicesStateManager: ctx.servicesStateManager,
|
||||||
|
asyncHandler: ctx.asyncHandler,
|
||||||
|
errorResponse: ctx.errorResponse,
|
||||||
|
log: ctx.log
|
||||||
|
};
|
||||||
|
|
||||||
// All recipe routes require premium license
|
// All recipe routes require premium license
|
||||||
router.use(ctx.licenseManager.requirePremium('recipes'));
|
router.use(ctx.licenseManager.requirePremium('recipes'));
|
||||||
|
|
||||||
// GET /api/recipes/templates — list all recipe templates
|
// GET /api/recipes/templates — list all recipe templates
|
||||||
router.get('/templates', ctx.asyncHandler(async (req, res) => {
|
router.get('/templates', deps.asyncHandler(async (req, res) => {
|
||||||
const { RECIPE_TEMPLATES, RECIPE_CATEGORIES } = require('../../recipe-templates');
|
const { RECIPE_TEMPLATES, RECIPE_CATEGORIES } = require('../../recipe-templates');
|
||||||
const templates = Object.entries(RECIPE_TEMPLATES).map(([id, recipe]) => ({
|
const templates = Object.entries(RECIPE_TEMPLATES).map(([id, recipe]) => ({
|
||||||
id,
|
id,
|
||||||
@@ -39,7 +53,7 @@ module.exports = function(ctx) {
|
|||||||
}, 'recipe-templates'));
|
}, 'recipe-templates'));
|
||||||
|
|
||||||
// GET /api/recipes/templates/:recipeId — get single recipe template detail
|
// GET /api/recipes/templates/:recipeId — get single recipe template detail
|
||||||
router.get('/templates/:recipeId', ctx.asyncHandler(async (req, res) => {
|
router.get('/templates/:recipeId', deps.asyncHandler(async (req, res) => {
|
||||||
const { RECIPE_TEMPLATES } = require('../../recipe-templates');
|
const { RECIPE_TEMPLATES } = require('../../recipe-templates');
|
||||||
const recipe = RECIPE_TEMPLATES[req.params.recipeId];
|
const recipe = RECIPE_TEMPLATES[req.params.recipeId];
|
||||||
if (!recipe) throw new NotFoundError(`Recipe template ${req.params.recipeId}`);
|
if (!recipe) throw new NotFoundError(`Recipe template ${req.params.recipeId}`);
|
||||||
@@ -48,8 +62,8 @@ module.exports = function(ctx) {
|
|||||||
}, 'recipe-template-detail'));
|
}, 'recipe-template-detail'));
|
||||||
|
|
||||||
// Mount deploy and manage sub-routes
|
// Mount deploy and manage sub-routes
|
||||||
router.use(deployRoutes(ctx));
|
router.use(deployRoutes(deps));
|
||||||
router.use(manageRoutes(ctx));
|
router.use(manageRoutes(deps));
|
||||||
|
|
||||||
return router;
|
return router;
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -2,14 +2,22 @@ const express = require('express');
|
|||||||
const { DOCKER } = require('../../constants');
|
const { DOCKER } = require('../../constants');
|
||||||
const { NotFoundError } = require('../../errors');
|
const { NotFoundError } = require('../../errors');
|
||||||
|
|
||||||
module.exports = function(ctx) {
|
module.exports = function({ servicesStateManager, asyncHandler, log }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
/**
|
||||||
|
* Recipes management routes factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.servicesStateManager - Services state manager
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* GET /api/recipes/deployed — list all deployed recipes (grouped by recipeId)
|
* GET /api/recipes/deployed — list all deployed recipes (grouped by recipeId)
|
||||||
*/
|
*/
|
||||||
router.get('/deployed', ctx.asyncHandler(async (req, res) => {
|
router.get('/deployed', asyncHandler(async (req, res) => {
|
||||||
const services = await ctx.servicesStateManager.read();
|
const services = await servicesStateManager.read();
|
||||||
const recipeGroups = {};
|
const recipeGroups = {};
|
||||||
|
|
||||||
for (const service of services) {
|
for (const service of services) {
|
||||||
@@ -64,7 +72,7 @@ module.exports = function(ctx) {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('recipe', 'Could not list Docker containers for recipe discovery', { error: e.message });
|
log.warn('recipe', 'Could not list Docker containers for recipe discovery', { error: e.message });
|
||||||
}
|
}
|
||||||
|
|
||||||
// Enrich with container state
|
// Enrich with container state
|
||||||
@@ -92,7 +100,7 @@ module.exports = function(ctx) {
|
|||||||
/**
|
/**
|
||||||
* POST /api/recipes/:recipeId/start — start all containers in a recipe
|
* POST /api/recipes/:recipeId/start — start all containers in a recipe
|
||||||
*/
|
*/
|
||||||
router.post('/:recipeId/start', ctx.asyncHandler(async (req, res) => {
|
router.post('/:recipeId/start', asyncHandler(async (req, res) => {
|
||||||
const { recipeId } = req.params;
|
const { recipeId } = req.params;
|
||||||
const containers = await findRecipeContainers(recipeId);
|
const containers = await findRecipeContainers(recipeId);
|
||||||
|
|
||||||
@@ -116,14 +124,14 @@ module.exports = function(ctx) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx.log.info('recipe', 'Recipe started', { recipeId, results });
|
log.info('recipe', 'Recipe started', { recipeId, results });
|
||||||
res.json({ success: true, recipeId, results });
|
res.json({ success: true, recipeId, results });
|
||||||
}, 'recipe-start'));
|
}, 'recipe-start'));
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* POST /api/recipes/:recipeId/stop — stop all containers in a recipe
|
* POST /api/recipes/:recipeId/stop — stop all containers in a recipe
|
||||||
*/
|
*/
|
||||||
router.post('/:recipeId/stop', ctx.asyncHandler(async (req, res) => {
|
router.post('/:recipeId/stop', asyncHandler(async (req, res) => {
|
||||||
const { recipeId } = req.params;
|
const { recipeId } = req.params;
|
||||||
const containers = await findRecipeContainers(recipeId);
|
const containers = await findRecipeContainers(recipeId);
|
||||||
|
|
||||||
@@ -148,14 +156,14 @@ module.exports = function(ctx) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx.log.info('recipe', 'Recipe stopped', { recipeId, results });
|
log.info('recipe', 'Recipe stopped', { recipeId, results });
|
||||||
res.json({ success: true, recipeId, results });
|
res.json({ success: true, recipeId, results });
|
||||||
}, 'recipe-stop'));
|
}, 'recipe-stop'));
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* POST /api/recipes/:recipeId/restart — restart all containers in a recipe
|
* POST /api/recipes/:recipeId/restart — restart all containers in a recipe
|
||||||
*/
|
*/
|
||||||
router.post('/:recipeId/restart', ctx.asyncHandler(async (req, res) => {
|
router.post('/:recipeId/restart', asyncHandler(async (req, res) => {
|
||||||
const { recipeId } = req.params;
|
const { recipeId } = req.params;
|
||||||
const containers = await findRecipeContainers(recipeId);
|
const containers = await findRecipeContainers(recipeId);
|
||||||
|
|
||||||
@@ -174,14 +182,14 @@ module.exports = function(ctx) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx.log.info('recipe', 'Recipe restarted', { recipeId, results });
|
log.info('recipe', 'Recipe restarted', { recipeId, results });
|
||||||
res.json({ success: true, recipeId, results });
|
res.json({ success: true, recipeId, results });
|
||||||
}, 'recipe-restart'));
|
}, 'recipe-restart'));
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* DELETE /api/recipes/:recipeId — remove entire recipe (containers, network, services)
|
* DELETE /api/recipes/:recipeId — remove entire recipe (containers, network, services)
|
||||||
*/
|
*/
|
||||||
router.delete('/:recipeId', ctx.asyncHandler(async (req, res) => {
|
router.delete('/:recipeId', asyncHandler(async (req, res) => {
|
||||||
const { recipeId } = req.params;
|
const { recipeId } = req.params;
|
||||||
const containers = await findRecipeContainers(recipeId);
|
const containers = await findRecipeContainers(recipeId);
|
||||||
|
|
||||||
@@ -189,7 +197,7 @@ module.exports = function(ctx) {
|
|||||||
throw new NotFoundError('Containers for recipe');
|
throw new NotFoundError('Containers for recipe');
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx.log.info('recipe', 'Removing recipe', { recipeId, containerCount: containers.length });
|
log.info('recipe', 'Removing recipe', { recipeId, containerCount: containers.length });
|
||||||
|
|
||||||
const results = [];
|
const results = [];
|
||||||
const networkNames = new Set();
|
const networkNames = new Set();
|
||||||
@@ -213,7 +221,7 @@ module.exports = function(ctx) {
|
|||||||
try {
|
try {
|
||||||
await removeCaddyBlock(subdomain);
|
await removeCaddyBlock(subdomain);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('recipe', 'Failed to remove Caddy config', { subdomain, error: e.message });
|
log.warn('recipe', 'Failed to remove Caddy config', { subdomain, error: e.message });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -226,7 +234,7 @@ module.exports = function(ctx) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Remove recipe services from services.json
|
// Remove recipe services from services.json
|
||||||
await ctx.servicesStateManager.update(services => {
|
await servicesStateManager.update(services => {
|
||||||
return services.filter(s => s.recipeId !== recipeId);
|
return services.filter(s => s.recipeId !== recipeId);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -235,9 +243,9 @@ module.exports = function(ctx) {
|
|||||||
try {
|
try {
|
||||||
const network = ctx.docker.client.getNetwork(netName);
|
const network = ctx.docker.client.getNetwork(netName);
|
||||||
await network.remove();
|
await network.remove();
|
||||||
ctx.log.info('recipe', 'Removed Docker network', { netName });
|
log.info('recipe', 'Removed Docker network', { netName });
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('recipe', 'Failed to remove network', { netName, error: e.message });
|
log.warn('recipe', 'Failed to remove network', { netName, error: e.message });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -246,7 +254,7 @@ module.exports = function(ctx) {
|
|||||||
'info'
|
'info'
|
||||||
);
|
);
|
||||||
|
|
||||||
ctx.log.info('recipe', 'Recipe removed', { recipeId, results });
|
log.info('recipe', 'Recipe removed', { recipeId, results });
|
||||||
res.json({ success: true, recipeId, results });
|
res.json({ success: true, recipeId, results });
|
||||||
}, 'recipe-remove'));
|
}, 'recipe-remove'));
|
||||||
|
|
||||||
|
|||||||
@@ -1,29 +1,43 @@
|
|||||||
const express = require('express');
|
const express = require('express');
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const { CADDY, REGEX, LIMITS } = require('../constants');
|
const { CADDY, REGEX, LIMITS } = require('../constants');
|
||||||
const { ValidationError } = require('../errors');
|
const { ValidationError, ConflictError, NotFoundError } = require('../errors');
|
||||||
|
const { validateURL } = require('../input-validator');
|
||||||
|
|
||||||
module.exports = function(ctx) {
|
/**
|
||||||
|
* Sites route factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Object} deps.caddy - Caddy manager
|
||||||
|
* @param {Object} deps.dns - DNS manager
|
||||||
|
* @param {Function} deps.fetchT - Fetch with timeout
|
||||||
|
* @param {Function} deps.buildDomain - Domain builder function
|
||||||
|
* @param {Function} deps.addServiceToConfig - Service config adder
|
||||||
|
* @param {Object} deps.siteConfig - Site configuration
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
module.exports = function({ asyncHandler, caddy, dns, fetchT, buildDomain, addServiceToConfig, siteConfig, log }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
// Get Caddyfile contents
|
// Get Caddyfile contents
|
||||||
router.get('/caddyfile', ctx.asyncHandler(async (req, res) => {
|
router.get('/caddyfile', asyncHandler(async (req, res) => {
|
||||||
const content = await ctx.caddy.read();
|
const content = await caddy.read();
|
||||||
res.json({ success: true, content });
|
res.json({ success: true, content });
|
||||||
}, 'caddyfile-get'));
|
}, 'caddyfile-get'));
|
||||||
|
|
||||||
// Get current Caddy config (from admin API)
|
// Get current Caddy config (from admin API)
|
||||||
router.get('/caddy/config', ctx.asyncHandler(async (req, res) => {
|
router.get('/caddy/config', asyncHandler(async (req, res) => {
|
||||||
const response = await ctx.fetchT(`${ctx.caddy.adminUrl}/config/`);
|
const response = await fetchT(`${caddy.adminUrl}/config/`);
|
||||||
const config = await response.json();
|
const config = await response.json();
|
||||||
res.json({ success: true, config });
|
res.json({ success: true, config });
|
||||||
}, 'caddy-config'));
|
}, 'caddy-config'));
|
||||||
|
|
||||||
// Reload Caddy configuration via admin API
|
// Reload Caddy configuration via admin API
|
||||||
router.post('/caddy/reload', ctx.asyncHandler(async (req, res) => {
|
router.post('/caddy/reload', asyncHandler(async (req, res) => {
|
||||||
const caddyfileContent = await ctx.caddy.read();
|
const caddyfileContent = await caddy.read();
|
||||||
|
|
||||||
const response = await ctx.fetchT(`${ctx.caddy.adminUrl}/load`, {
|
const response = await fetchT(`${caddy.adminUrl}/load`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'Content-Type': CADDY.CONTENT_TYPE },
|
headers: { 'Content-Type': CADDY.CONTENT_TYPE },
|
||||||
body: caddyfileContent
|
body: caddyfileContent
|
||||||
@@ -31,16 +45,16 @@ module.exports = function(ctx) {
|
|||||||
|
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
const errorText = await response.text();
|
const errorText = await response.text();
|
||||||
ctx.log.error('caddy', 'Caddy reload failed', { error: errorText });
|
log.error('caddy', 'Caddy reload failed', { error: errorText });
|
||||||
return ctx.errorResponse(res, 500, '[DC-303] Caddy reload failed. Check server logs for details.');
|
throw new Error('Caddy reload failed. Check server logs for details.');
|
||||||
}
|
}
|
||||||
|
|
||||||
res.json({ success: true, message: 'Caddy configuration reloaded successfully' });
|
res.json({ success: true, message: 'Caddy configuration reloaded successfully' });
|
||||||
}, 'caddy-reload'));
|
}, 'caddy-reload'));
|
||||||
|
|
||||||
// Get Certificate Authorities from Caddyfile
|
// Get Certificate Authorities from Caddyfile
|
||||||
router.get('/caddy/cas', ctx.asyncHandler(async (req, res) => {
|
router.get('/caddy/cas', asyncHandler(async (req, res) => {
|
||||||
const content = await ctx.caddy.read();
|
const content = await caddy.read();
|
||||||
const cas = [];
|
const cas = [];
|
||||||
|
|
||||||
const pkiRegex = /pki\s*\{([^}]*(?:\{[^}]*\}[^}]*)*)\}/gs;
|
const pkiRegex = /pki\s*\{([^}]*(?:\{[^}]*\}[^}]*)*)\}/gs;
|
||||||
@@ -117,11 +131,11 @@ module.exports = function(ctx) {
|
|||||||
}, 'caddy-get-cas'));
|
}, 'caddy-get-cas'));
|
||||||
|
|
||||||
// Remove a site from Caddyfile
|
// Remove a site from Caddyfile
|
||||||
router.delete('/site/:domain', ctx.asyncHandler(async (req, res) => {
|
router.delete('/site/:domain', asyncHandler(async (req, res) => {
|
||||||
const { domain } = req.params;
|
const { domain } = req.params;
|
||||||
if (!domain) throw new ValidationError('Domain is required');
|
if (!domain) throw new ValidationError('Domain is required');
|
||||||
|
|
||||||
const result = await ctx.caddy.modify((content) => {
|
const result = await caddy.modify((content) => {
|
||||||
const escapedDomain = domain.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
const escapedDomain = domain.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||||
const siteBlockRegex = new RegExp(
|
const siteBlockRegex = new RegExp(
|
||||||
`\\n?${escapedDomain}\\s*\\{[^{}]*(?:\\{[^{}]*(?:\\{[^{}]*\\}[^{}]*)*\\}[^{}]*)*\\}\\s*`, 'g'
|
`\\n?${escapedDomain}\\s*\\{[^{}]*(?:\\{[^{}]*(?:\\{[^{}]*\\}[^{}]*)*\\}[^{}]*)*\\}\\s*`, 'g'
|
||||||
@@ -133,16 +147,16 @@ module.exports = function(ctx) {
|
|||||||
|
|
||||||
if (!result.success) {
|
if (!result.success) {
|
||||||
if (result.rolledBack) {
|
if (result.rolledBack) {
|
||||||
return ctx.errorResponse(res, 500, `Removed "${domain}" but Caddy reload failed (rolled back): ${result.error}`);
|
throw new Error( `Removed "${domain}" but Caddy reload failed (rolled back): ${result.error}`);
|
||||||
}
|
}
|
||||||
return ctx.errorResponse(res, 404, `Site block for "${domain}" not found in Caddyfile`);
|
throw new NotFoundError(`Site block for "" in Caddyfile`);
|
||||||
}
|
}
|
||||||
|
|
||||||
res.json({ success: true, message: `Site "${domain}" removed from Caddyfile and Caddy reloaded` });
|
res.json({ success: true, message: `Site "${domain}" removed from Caddyfile and Caddy reloaded` });
|
||||||
}, 'site-delete'));
|
}, 'site-delete'));
|
||||||
|
|
||||||
// Add a new site to Caddyfile and reload
|
// Add a new site to Caddyfile and reload
|
||||||
router.post('/site', ctx.asyncHandler(async (req, res) => {
|
router.post('/site', asyncHandler(async (req, res) => {
|
||||||
const { domain, upstream, config } = req.body;
|
const { domain, upstream, config } = req.body;
|
||||||
if (!domain || !upstream) throw new ValidationError('Domain and upstream are required');
|
if (!domain || !upstream) throw new ValidationError('Domain and upstream are required');
|
||||||
if (!REGEX.DOMAIN.test(domain)) throw new ValidationError('[DC-301] Invalid domain format');
|
if (!REGEX.DOMAIN.test(domain)) throw new ValidationError('[DC-301] Invalid domain format');
|
||||||
@@ -150,27 +164,27 @@ module.exports = function(ctx) {
|
|||||||
const upstreamRegex = /^[a-z0-9.-]+:\d{1,5}$/i;
|
const upstreamRegex = /^[a-z0-9.-]+:\d{1,5}$/i;
|
||||||
if (!upstreamRegex.test(upstream)) throw new ValidationError('Invalid upstream format. Use host:port');
|
if (!upstreamRegex.test(upstream)) throw new ValidationError('Invalid upstream format. Use host:port');
|
||||||
|
|
||||||
let content = await ctx.caddy.read();
|
let content = await caddy.read();
|
||||||
const escapedDomain = domain.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
const escapedDomain = domain.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||||
const siteBlockRegex = new RegExp(`\\n?${escapedDomain}\\s*\\{`, 'g');
|
const siteBlockRegex = new RegExp(`\\n?${escapedDomain}\\s*\\{`, 'g');
|
||||||
if (siteBlockRegex.test(content)) {
|
if (siteBlockRegex.test(content)) {
|
||||||
return ctx.errorResponse(res, 409, `[DC-302] Site block for "${domain}" already exists in Caddyfile`);
|
throw new ConflictError(`Site block for "" already exists in Caddyfile`);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Always generate structured config — never allow raw Caddy config injection
|
// Always generate structured config — never allow raw Caddy config injection
|
||||||
const newSiteBlock = `\n${domain} {\n reverse_proxy ${upstream}\n tls internal\n}\n`;
|
const newSiteBlock = `\n${domain} {\n reverse_proxy ${upstream}\n tls internal\n}\n`;
|
||||||
|
|
||||||
const result = await ctx.caddy.modify(c => c + newSiteBlock);
|
const result = await caddy.modify(c => c + newSiteBlock);
|
||||||
if (!result.success) {
|
if (!result.success) {
|
||||||
return ctx.errorResponse(res, 500, `[DC-303] Site added to Caddyfile but reload failed: ${result.error}`,
|
throw new Error( `[DC-303] Site added to Caddyfile but reload failed: ${result.error}`,
|
||||||
result.rolledBack ? { note: 'Caddyfile was rolled back to previous state' } : {});
|
result.rolledBack ? { note: 'Caddyfile was rolled back to previous state' } : {});
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx.ok(res, { message: `Site "${domain}" added to Caddyfile and Caddy reloaded successfully` });
|
res.json({ success: true, message: `Site "${domain}" added to Caddyfile and Caddy reloaded successfully` });
|
||||||
}, 'site-add'));
|
}, 'site-add'));
|
||||||
|
|
||||||
// Add external service reverse proxy to Caddyfile
|
// Add external service reverse proxy to Caddyfile
|
||||||
router.post('/site/external', ctx.asyncHandler(async (req, res) => {
|
router.post('/site/external', asyncHandler(async (req, res) => {
|
||||||
const { subdomain, externalUrl, preserveHost, followRedirects, sslType, caddyfilePath, reloadCaddy: shouldReload, createDns, serviceName, logo } = req.body;
|
const { subdomain, externalUrl, preserveHost, followRedirects, sslType, caddyfilePath, reloadCaddy: shouldReload, createDns, serviceName, logo } = req.body;
|
||||||
|
|
||||||
if (!subdomain || !externalUrl) {
|
if (!subdomain || !externalUrl) {
|
||||||
@@ -181,22 +195,22 @@ module.exports = function(ctx) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
ctx.validateURL(externalUrl);
|
validateURL(externalUrl);
|
||||||
} catch (validationErr) {
|
} catch (validationErr) {
|
||||||
return ctx.errorResponse(res, 400, validationErr.message);
|
throw new ValidationError(validationErr.message);
|
||||||
}
|
}
|
||||||
|
|
||||||
const domain = ctx.buildDomain(subdomain);
|
const domain = buildDomain(subdomain);
|
||||||
let dnsWarning = null;
|
let dnsWarning = null;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
if (createDns) {
|
if (createDns) {
|
||||||
try {
|
try {
|
||||||
await ctx.dns.createRecord(subdomain, ctx.siteConfig.dnsServerIp);
|
await dns.createRecord(subdomain, siteConfig.dnsServerIp);
|
||||||
ctx.log.info('dns', 'DNS record created for external proxy', { domain, ip: ctx.siteConfig.dnsServerIp });
|
log.info('dns', 'DNS record created for external proxy', { domain, ip: siteConfig.dnsServerIp });
|
||||||
} catch (dnsError) {
|
} catch (dnsError) {
|
||||||
dnsWarning = `DNS creation failed: ${dnsError.message}. You may need to create the DNS record manually.`;
|
dnsWarning = `DNS creation failed: ${dnsError.message}. You may need to create the DNS record manually.`;
|
||||||
ctx.log.warn('dns', 'DNS creation failed for external proxy', { domain, error: dnsError.message });
|
log.warn('dns', 'DNS creation failed for external proxy', { domain, error: dnsError.message });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -221,29 +235,29 @@ module.exports = function(ctx) {
|
|||||||
proxyConfig = `\n${domain} {\n ${sslConfig}\n\n reverse_proxy ${externalUrl} {${hostHeader}\n transport http {\n tls\n }\n }\n}\n`;
|
proxyConfig = `\n${domain} {\n ${sslConfig}\n\n reverse_proxy ${externalUrl} {${hostHeader}\n transport http {\n tls\n }\n }\n}\n`;
|
||||||
}
|
}
|
||||||
|
|
||||||
const caddyResult = await ctx.caddy.modify(c => {
|
const caddyResult = await caddy.modify(c => {
|
||||||
const escapedDomain = domain.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
const escapedDomain = domain.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||||
if (new RegExp(`\\n?${escapedDomain}\\s*\\{`, 'g').test(c)) return null;
|
if (new RegExp(`\\n?${escapedDomain}\\s*\\{`, 'g').test(c)) return null;
|
||||||
return c + proxyConfig;
|
return c + proxyConfig;
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!caddyResult.success && !caddyResult.rolledBack) {
|
if (!caddyResult.success && !caddyResult.rolledBack) {
|
||||||
return ctx.errorResponse(res, 409, `[DC-302] Site block for "${domain}" already exists in Caddyfile`);
|
throw new ConflictError(`Site block for "" already exists in Caddyfile`);
|
||||||
}
|
}
|
||||||
if (!caddyResult.success) {
|
if (!caddyResult.success) {
|
||||||
return ctx.errorResponse(res, 500, `[DC-303] External proxy added but Caddy reload failed (rolled back): ${caddyResult.error}`);
|
throw new Error( `[DC-303] External proxy added but Caddy reload failed (rolled back): ${caddyResult.error}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (serviceName && logo) {
|
if (serviceName && logo) {
|
||||||
try {
|
try {
|
||||||
await ctx.addServiceToConfig({
|
await addServiceToConfig({
|
||||||
id: subdomain, name: serviceName, logo,
|
id: subdomain, name: serviceName, logo,
|
||||||
isExternal: true, externalUrl,
|
isExternal: true, externalUrl,
|
||||||
deployedAt: new Date().toISOString()
|
deployedAt: new Date().toISOString()
|
||||||
});
|
});
|
||||||
ctx.log.info('deploy', 'Service added to dashboard', { subdomain });
|
log.info('deploy', 'Service added to dashboard', { subdomain });
|
||||||
} catch (serviceError) {
|
} catch (serviceError) {
|
||||||
ctx.log.warn('deploy', 'Failed to add service to dashboard', { subdomain, error: serviceError.message });
|
log.warn('deploy', 'Failed to add service to dashboard', { subdomain, error: serviceError.message });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -254,7 +268,7 @@ module.exports = function(ctx) {
|
|||||||
if (dnsWarning) response.warning = dnsWarning;
|
if (dnsWarning) response.warning = dnsWarning;
|
||||||
res.json(response);
|
res.json(response);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
ctx.errorResponse(res, 500, ctx.safeErrorMessage(error));
|
throw error;
|
||||||
}
|
}
|
||||||
}, 'site-external'));
|
}, 'site-external'));
|
||||||
|
|
||||||
|
|||||||
@@ -2,14 +2,37 @@ const express = require('express');
|
|||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const { TAILSCALE } = require('../constants');
|
const { TAILSCALE } = require('../constants');
|
||||||
const { exists } = require('../fs-helpers');
|
const { exists } = require('../fs-helpers');
|
||||||
|
const { ValidationError, NotFoundError } = require('../errors');
|
||||||
|
|
||||||
module.exports = function(ctx) {
|
/**
|
||||||
|
* Tailscale route factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.tailscale - Tailscale manager
|
||||||
|
* @param {Object} deps.caddy - Caddy manager
|
||||||
|
* @param {Object} deps.servicesStateManager - Services state manager
|
||||||
|
* @param {Object} deps.credentialManager - Credential manager
|
||||||
|
* @param {Function} deps.buildDomain - Domain builder function
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {string} deps.SERVICES_FILE - Path to services.json
|
||||||
|
* @param {Object} deps.log - Logger instance
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
module.exports = function({
|
||||||
|
tailscale,
|
||||||
|
caddy,
|
||||||
|
servicesStateManager,
|
||||||
|
credentialManager,
|
||||||
|
buildDomain,
|
||||||
|
asyncHandler,
|
||||||
|
SERVICES_FILE,
|
||||||
|
log
|
||||||
|
}) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
// Get Tailscale status and configuration
|
// Get Tailscale status and configuration
|
||||||
router.get('/status', ctx.asyncHandler(async (req, res) => {
|
router.get('/status', asyncHandler(async (req, res) => {
|
||||||
const status = await ctx.tailscale.getStatus();
|
const status = await tailscale.getStatus();
|
||||||
const localIP = await ctx.tailscale.getLocalIP();
|
const localIP = await tailscale.getLocalIP();
|
||||||
|
|
||||||
if (!status) {
|
if (!status) {
|
||||||
return res.json({
|
return res.json({
|
||||||
@@ -46,37 +69,37 @@ module.exports = function(ctx) {
|
|||||||
tailnetName: status.MagicDNSSuffix,
|
tailnetName: status.MagicDNSSuffix,
|
||||||
online: status.Self?.Online
|
online: status.Self?.Online
|
||||||
},
|
},
|
||||||
config: ctx.tailscale.config,
|
config: tailscale.config,
|
||||||
devices,
|
devices,
|
||||||
deviceCount: devices.length
|
deviceCount: devices.length
|
||||||
});
|
});
|
||||||
}, 'tailscale-status'));
|
}, 'tailscale-status'));
|
||||||
|
|
||||||
// Update Tailscale configuration
|
// Update Tailscale configuration
|
||||||
router.post('/config', ctx.asyncHandler(async (req, res) => {
|
router.post('/config', asyncHandler(async (req, res) => {
|
||||||
const { enabled, requireAuth, allowedTailnet } = req.body;
|
const { enabled, requireAuth, allowedTailnet } = req.body;
|
||||||
|
|
||||||
if (typeof enabled !== 'undefined') ctx.tailscale.config.enabled = enabled;
|
if (typeof enabled !== 'undefined') tailscale.config.enabled = enabled;
|
||||||
if (typeof requireAuth !== 'undefined') ctx.tailscale.config.requireAuth = requireAuth;
|
if (typeof requireAuth !== 'undefined') tailscale.config.requireAuth = requireAuth;
|
||||||
if (typeof allowedTailnet !== 'undefined') ctx.tailscale.config.allowedTailnet = allowedTailnet;
|
if (typeof allowedTailnet !== 'undefined') tailscale.config.allowedTailnet = allowedTailnet;
|
||||||
|
|
||||||
await ctx.tailscale.save();
|
await tailscale.save();
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
message: 'Tailscale configuration updated',
|
message: 'Tailscale configuration updated',
|
||||||
config: ctx.tailscale.config
|
config: tailscale.config
|
||||||
});
|
});
|
||||||
}, 'tailscale-config'));
|
}, 'tailscale-config'));
|
||||||
|
|
||||||
// Check if a request is coming from Tailscale
|
// Check if a request is coming from Tailscale
|
||||||
router.get('/check-connection', ctx.asyncHandler(async (req, res) => {
|
router.get('/check-connection', asyncHandler(async (req, res) => {
|
||||||
const clientIP = req.ip || req.connection?.remoteAddress || '';
|
const clientIP = req.ip || req.connection?.remoteAddress || '';
|
||||||
const forwardedFor = req.headers['x-forwarded-for'];
|
const forwardedFor = req.headers['x-forwarded-for'];
|
||||||
const realIP = req.headers['x-real-ip'];
|
const realIP = req.headers['x-real-ip'];
|
||||||
|
|
||||||
const ipsToCheck = [clientIP, forwardedFor, realIP].filter(Boolean);
|
const ipsToCheck = [clientIP, forwardedFor, realIP].filter(Boolean);
|
||||||
const isTailscale = ipsToCheck.some(ip => ctx.tailscale.isTailscaleIP(ip.toString().split(',')[0].trim()));
|
const isTailscale = ipsToCheck.some(ip => tailscale.isTailscaleIP(ip.toString().split(',')[0].trim()));
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@@ -88,8 +111,8 @@ module.exports = function(ctx) {
|
|||||||
}, 'tailscale-check'));
|
}, 'tailscale-check'));
|
||||||
|
|
||||||
// Get Tailscale device list
|
// Get Tailscale device list
|
||||||
router.get('/devices', ctx.asyncHandler(async (req, res) => {
|
router.get('/devices', asyncHandler(async (req, res) => {
|
||||||
const status = await ctx.tailscale.getStatus();
|
const status = await tailscale.getStatus();
|
||||||
if (!status || !status.Peer) {
|
if (!status || !status.Peer) {
|
||||||
return res.json({ success: true, devices: [] });
|
return res.json({ success: true, devices: [] });
|
||||||
}
|
}
|
||||||
@@ -122,15 +145,15 @@ module.exports = function(ctx) {
|
|||||||
}, 'tailscale-devices'));
|
}, 'tailscale-devices'));
|
||||||
|
|
||||||
// Toggle Tailscale-only mode for an existing service
|
// Toggle Tailscale-only mode for an existing service
|
||||||
router.post('/protect-service', ctx.asyncHandler(async (req, res) => {
|
router.post('/protect-service', asyncHandler(async (req, res) => {
|
||||||
const { subdomain, tailscaleOnly, allowedIPs } = req.body;
|
const { subdomain, tailscaleOnly, allowedIPs } = req.body;
|
||||||
|
|
||||||
if (!subdomain) {
|
if (!subdomain) {
|
||||||
throw new ValidationError('subdomain is required');
|
throw new ValidationError('subdomain is required');
|
||||||
}
|
}
|
||||||
|
|
||||||
let content = await ctx.caddy.read();
|
let content = await caddy.read();
|
||||||
const domain = ctx.buildDomain(subdomain);
|
const domain = buildDomain(subdomain);
|
||||||
|
|
||||||
const blockRegex = new RegExp(`(${domain.replace('.', '\\.')}\\s*\\{[^}]*\\})`, 's');
|
const blockRegex = new RegExp(`(${domain.replace('.', '\\.')}\\s*\\{[^}]*\\})`, 's');
|
||||||
const match = content.match(blockRegex);
|
const match = content.match(blockRegex);
|
||||||
@@ -147,18 +170,18 @@ module.exports = function(ctx) {
|
|||||||
|
|
||||||
const [ip, port] = proxyMatch[1].split(':');
|
const [ip, port] = proxyMatch[1].split(':');
|
||||||
|
|
||||||
const newConfig = ctx.caddy.generateConfig(subdomain, ip, port || '80', {
|
const newConfig = caddy.generateConfig(subdomain, ip, port || '80', {
|
||||||
tailscaleOnly: tailscaleOnly !== false,
|
tailscaleOnly: tailscaleOnly !== false,
|
||||||
allowedIPs: allowedIPs || []
|
allowedIPs: allowedIPs || []
|
||||||
});
|
});
|
||||||
|
|
||||||
const caddyResult = await ctx.caddy.modify(c => c.replace(blockRegex, newConfig));
|
const caddyResult = await caddy.modify(c => c.replace(blockRegex, newConfig));
|
||||||
if (!caddyResult.success) {
|
if (!caddyResult.success) {
|
||||||
return ctx.errorResponse(res, 500, `[DC-303] Failed to reload Caddy: ${caddyResult.error}`);
|
throw new Error(`Failed to reload Caddy: ${caddyResult.error}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (await exists(ctx.SERVICES_FILE)) {
|
if (await exists(SERVICES_FILE)) {
|
||||||
await ctx.servicesStateManager.update(services => {
|
await servicesStateManager.update(services => {
|
||||||
const serviceIndex = services.findIndex(s => s.id === subdomain);
|
const serviceIndex = services.findIndex(s => s.id === subdomain);
|
||||||
if (serviceIndex !== -1) {
|
if (serviceIndex !== -1) {
|
||||||
services[serviceIndex].tailscaleOnly = tailscaleOnly !== false;
|
services[serviceIndex].tailscaleOnly = tailscaleOnly !== false;
|
||||||
@@ -177,7 +200,7 @@ module.exports = function(ctx) {
|
|||||||
// ── Tailscale API Integration (OAuth 2.0) ──
|
// ── Tailscale API Integration (OAuth 2.0) ──
|
||||||
|
|
||||||
// Save OAuth client credentials + validate by exchanging for a token
|
// Save OAuth client credentials + validate by exchanging for a token
|
||||||
router.post('/oauth-config', ctx.asyncHandler(async (req, res) => {
|
router.post('/oauth-config', asyncHandler(async (req, res) => {
|
||||||
const { clientId, clientSecret, tailnet } = req.body;
|
const { clientId, clientSecret, tailnet } = req.body;
|
||||||
|
|
||||||
if (!clientId || !clientSecret || !tailnet) {
|
if (!clientId || !clientSecret || !tailnet) {
|
||||||
@@ -192,7 +215,7 @@ module.exports = function(ctx) {
|
|||||||
});
|
});
|
||||||
|
|
||||||
if (!tokenRes.ok) {
|
if (!tokenRes.ok) {
|
||||||
return ctx.errorResponse(res, 400, `OAuth validation failed: HTTP ${tokenRes.status}`);
|
throw new ValidationError(`OAuth validation failed: HTTP ${tokenRes.status}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
const tokenData = await tokenRes.json();
|
const tokenData = await tokenRes.json();
|
||||||
@@ -203,85 +226,85 @@ module.exports = function(ctx) {
|
|||||||
});
|
});
|
||||||
|
|
||||||
if (!testRes.ok) {
|
if (!testRes.ok) {
|
||||||
return ctx.errorResponse(res, 400, `API test failed: HTTP ${testRes.status}. Check tailnet name and OAuth scopes (needs devices:read, acl:read).`);
|
throw new ValidationError(`API test failed: HTTP ${testRes.status}. Check tailnet name and OAuth scopes (needs devices:read, acl:read).`);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Store credentials securely
|
// Store credentials securely
|
||||||
await ctx.credentialManager.store('tailscale.oauth.client_id', clientId, { provider: 'tailscale' });
|
await credentialManager.store('tailscale.oauth.client_id', clientId, { provider: 'tailscale' });
|
||||||
await ctx.credentialManager.store('tailscale.oauth.client_secret', clientSecret, { provider: 'tailscale', tailnet });
|
await credentialManager.store('tailscale.oauth.client_secret', clientSecret, { provider: 'tailscale', tailnet });
|
||||||
|
|
||||||
// Update config
|
// Update config
|
||||||
ctx.tailscale.config.oauthConfigured = true;
|
tailscale.config.oauthConfigured = true;
|
||||||
ctx.tailscale.config.tailnet = tailnet;
|
tailscale.config.tailnet = tailnet;
|
||||||
if (!ctx.tailscale.config.allowedTailnet) {
|
if (!tailscale.config.allowedTailnet) {
|
||||||
const status = await ctx.tailscale.getStatus();
|
const status = await tailscale.getStatus();
|
||||||
if (status?.MagicDNSSuffix) {
|
if (status?.MagicDNSSuffix) {
|
||||||
ctx.tailscale.config.allowedTailnet = status.MagicDNSSuffix;
|
tailscale.config.allowedTailnet = status.MagicDNSSuffix;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
await ctx.tailscale.save();
|
await tailscale.save();
|
||||||
|
|
||||||
// Start background sync
|
// Start background sync
|
||||||
ctx.tailscale.startSync();
|
tailscale.startSync();
|
||||||
|
|
||||||
// Trigger initial sync
|
// Trigger initial sync
|
||||||
try {
|
try {
|
||||||
await ctx.tailscale.syncAPI();
|
await tailscale.syncAPI();
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
ctx.log.warn('tailscale', 'Initial sync after OAuth config failed', { error: e.message });
|
log.warn('tailscale', 'Initial sync after OAuth config failed', { error: e.message });
|
||||||
}
|
}
|
||||||
|
|
||||||
res.json({ success: true, config: ctx.tailscale.config });
|
res.json({ success: true, config: tailscale.config });
|
||||||
}, 'tailscale-oauth-config'));
|
}, 'tailscale-oauth-config'));
|
||||||
|
|
||||||
// Remove OAuth credentials and disable API sync
|
// Remove OAuth credentials and disable API sync
|
||||||
router.delete('/oauth-config', ctx.asyncHandler(async (req, res) => {
|
router.delete('/oauth-config', asyncHandler(async (req, res) => {
|
||||||
await ctx.credentialManager.delete('tailscale.oauth.client_id');
|
await credentialManager.delete('tailscale.oauth.client_id');
|
||||||
await ctx.credentialManager.delete('tailscale.oauth.client_secret');
|
await credentialManager.delete('tailscale.oauth.client_secret');
|
||||||
|
|
||||||
ctx.tailscale.config.oauthConfigured = false;
|
tailscale.config.oauthConfigured = false;
|
||||||
ctx.tailscale.config.tailnet = null;
|
tailscale.config.tailnet = null;
|
||||||
ctx.tailscale.config.lastSync = null;
|
tailscale.config.lastSync = null;
|
||||||
await ctx.tailscale.save();
|
await tailscale.save();
|
||||||
|
|
||||||
ctx.tailscale.stopSync();
|
tailscale.stopSync();
|
||||||
|
|
||||||
res.json({ success: true, message: 'Tailscale OAuth credentials removed' });
|
res.json({ success: true, message: 'Tailscale OAuth credentials removed' });
|
||||||
}, 'tailscale-oauth-delete'));
|
}, 'tailscale-oauth-delete'));
|
||||||
|
|
||||||
// Get enriched device list from Tailscale API
|
// Get enriched device list from Tailscale API
|
||||||
router.get('/api-devices', ctx.asyncHandler(async (req, res) => {
|
router.get('/api-devices', asyncHandler(async (req, res) => {
|
||||||
if (!ctx.tailscale.config.oauthConfigured) {
|
if (!tailscale.config.oauthConfigured) {
|
||||||
throw new ValidationError('Tailscale API not configured. Set up OAuth first.');
|
throw new ValidationError('Tailscale API not configured. Set up OAuth first.');
|
||||||
}
|
}
|
||||||
|
|
||||||
// Return cached devices from last sync
|
// Return cached devices from last sync
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
devices: ctx.tailscale.config.devices || [],
|
devices: tailscale.config.devices || [],
|
||||||
lastSync: ctx.tailscale.config.lastSync
|
lastSync: tailscale.config.lastSync
|
||||||
});
|
});
|
||||||
}, 'tailscale-api-devices'));
|
}, 'tailscale-api-devices'));
|
||||||
|
|
||||||
// Manually trigger an API sync
|
// Manually trigger an API sync
|
||||||
router.post('/sync', ctx.asyncHandler(async (req, res) => {
|
router.post('/sync', asyncHandler(async (req, res) => {
|
||||||
if (!ctx.tailscale.config.oauthConfigured) {
|
if (!tailscale.config.oauthConfigured) {
|
||||||
throw new ValidationError('Tailscale API not configured. Set up OAuth first.');
|
throw new ValidationError('Tailscale API not configured. Set up OAuth first.');
|
||||||
}
|
}
|
||||||
|
|
||||||
const devices = await ctx.tailscale.syncAPI();
|
const devices = await tailscale.syncAPI();
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
devices: devices || [],
|
devices: devices || [],
|
||||||
lastSync: ctx.tailscale.config.lastSync
|
lastSync: tailscale.config.lastSync
|
||||||
});
|
});
|
||||||
}, 'tailscale-sync'));
|
}, 'tailscale-sync'));
|
||||||
|
|
||||||
// Fetch ACL policy (read-only)
|
// Fetch ACL policy (read-only)
|
||||||
router.get('/acl', ctx.asyncHandler(async (req, res) => {
|
router.get('/acl', asyncHandler(async (req, res) => {
|
||||||
const token = await ctx.tailscale.getAccessToken();
|
const token = await tailscale.getAccessToken();
|
||||||
const tailnet = ctx.tailscale.config.tailnet;
|
const tailnet = tailscale.config.tailnet;
|
||||||
if (!token || !tailnet) {
|
if (!token || !tailnet) {
|
||||||
throw new ValidationError('Tailscale API not configured');
|
throw new ValidationError('Tailscale API not configured');
|
||||||
}
|
}
|
||||||
@@ -290,7 +313,7 @@ module.exports = function(ctx) {
|
|||||||
headers: { Authorization: `Bearer ${token}`, Accept: 'application/json' }
|
headers: { Authorization: `Bearer ${token}`, Accept: 'application/json' }
|
||||||
});
|
});
|
||||||
if (!aclRes.ok) {
|
if (!aclRes.ok) {
|
||||||
return ctx.errorResponse(res, aclRes.status, `ACL fetch failed: HTTP ${aclRes.status}`);
|
throw new Error(`ACL fetch failed: HTTP ${aclRes.status}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
const acl = await aclRes.json();
|
const acl = await aclRes.json();
|
||||||
|
|||||||
@@ -2,87 +2,96 @@ const express = require('express');
|
|||||||
const { paginate, parsePaginationParams } = require('../pagination');
|
const { paginate, parsePaginationParams } = require('../pagination');
|
||||||
const { ValidationError } = require('../errors');
|
const { ValidationError } = require('../errors');
|
||||||
|
|
||||||
module.exports = function(ctx) {
|
/**
|
||||||
|
* Updates route factory
|
||||||
|
* @param {Object} deps - Explicit dependencies
|
||||||
|
* @param {Object} deps.updateManager - Container update manager
|
||||||
|
* @param {Object} deps.selfUpdater - DashCaddy self-update manager
|
||||||
|
* @param {Function} deps.asyncHandler - Async route handler wrapper
|
||||||
|
* @param {Function} deps.logError - Error logging function
|
||||||
|
* @returns {express.Router}
|
||||||
|
*/
|
||||||
|
module.exports = function({ updateManager, selfUpdater, asyncHandler, logError }) {
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
// ===== UPDATE MANAGEMENT ENDPOINTS =====
|
// ===== UPDATE MANAGEMENT ENDPOINTS =====
|
||||||
|
|
||||||
// Check for updates
|
// Check for updates
|
||||||
router.post('/updates/check', ctx.asyncHandler(async (req, res) => {
|
router.post('/updates/check', asyncHandler(async (req, res) => {
|
||||||
await ctx.updateManager.checkForUpdates();
|
await updateManager.checkForUpdates();
|
||||||
const updates = ctx.updateManager.getAvailableUpdates();
|
const updates = updateManager.getAvailableUpdates();
|
||||||
res.json({ success: true, updates, count: updates.length });
|
res.json({ success: true, updates, count: updates.length });
|
||||||
}, 'updates-check'));
|
}, 'updates-check'));
|
||||||
|
|
||||||
// Get available updates
|
// Get available updates
|
||||||
router.get('/updates/available', ctx.asyncHandler(async (req, res) => {
|
router.get('/updates/available', asyncHandler(async (req, res) => {
|
||||||
const updates = ctx.updateManager.getAvailableUpdates();
|
const updates = updateManager.getAvailableUpdates();
|
||||||
const paginationParams = parsePaginationParams(req.query);
|
const paginationParams = parsePaginationParams(req.query);
|
||||||
const result = paginate(updates, paginationParams);
|
const result = paginate(updates, paginationParams);
|
||||||
res.json({ success: true, updates: result.data, count: updates.length, ...(result.pagination && { pagination: result.pagination }) });
|
res.json({ success: true, updates: result.data, count: updates.length, ...(result.pagination && { pagination: result.pagination }) });
|
||||||
}, 'updates-available'));
|
}, 'updates-available'));
|
||||||
|
|
||||||
// Update a container
|
// Update a container
|
||||||
router.post('/updates/update/:containerId', ctx.asyncHandler(async (req, res) => {
|
router.post('/updates/update/:containerId', asyncHandler(async (req, res) => {
|
||||||
const result = await ctx.updateManager.updateContainer(req.params.containerId, req.body);
|
const result = await updateManager.updateContainer(req.params.containerId, req.body);
|
||||||
res.json({ success: true, result });
|
res.json({ success: true, result });
|
||||||
}, 'updates-update'));
|
}, 'updates-update'));
|
||||||
|
|
||||||
// Rollback update
|
// Rollback update
|
||||||
router.post('/updates/rollback/:containerId', ctx.asyncHandler(async (req, res) => {
|
router.post('/updates/rollback/:containerId', asyncHandler(async (req, res) => {
|
||||||
await ctx.updateManager.rollbackUpdate(req.params.containerId);
|
await updateManager.rollbackUpdate(req.params.containerId);
|
||||||
res.json({ success: true, message: 'Rollback completed' });
|
res.json({ success: true, message: 'Rollback completed' });
|
||||||
}, 'updates-rollback'));
|
}, 'updates-rollback'));
|
||||||
|
|
||||||
// Get update history
|
// Get update history
|
||||||
router.get('/updates/history', ctx.asyncHandler(async (req, res) => {
|
router.get('/updates/history', asyncHandler(async (req, res) => {
|
||||||
const paginationParams = parsePaginationParams(req.query);
|
const paginationParams = parsePaginationParams(req.query);
|
||||||
// When paginating, fetch all history so pagination can slice correctly
|
// When paginating, fetch all history so pagination can slice correctly
|
||||||
const fetchLimit = paginationParams ? Number.MAX_SAFE_INTEGER : (parseInt(req.query.limit) || 50);
|
const fetchLimit = paginationParams ? Number.MAX_SAFE_INTEGER : (parseInt(req.query.limit) || 50);
|
||||||
const history = ctx.updateManager.getHistory(fetchLimit);
|
const history = updateManager.getHistory(fetchLimit);
|
||||||
const result = paginate(history, paginationParams);
|
const result = paginate(history, paginationParams);
|
||||||
res.json({ success: true, history: result.data, ...(result.pagination && { pagination: result.pagination }) });
|
res.json({ success: true, history: result.data, ...(result.pagination && { pagination: result.pagination }) });
|
||||||
}, 'updates-history'));
|
}, 'updates-history'));
|
||||||
|
|
||||||
// Configure auto-update
|
// Configure auto-update
|
||||||
router.post('/updates/auto-update/:containerId', ctx.asyncHandler(async (req, res) => {
|
router.post('/updates/auto-update/:containerId', asyncHandler(async (req, res) => {
|
||||||
ctx.updateManager.configureAutoUpdate(req.params.containerId, req.body);
|
updateManager.configureAutoUpdate(req.params.containerId, req.body);
|
||||||
res.json({ success: true, message: 'Auto-update configured' });
|
res.json({ success: true, message: 'Auto-update configured' });
|
||||||
}, 'updates-auto-update'));
|
}, 'updates-auto-update'));
|
||||||
|
|
||||||
// Schedule update
|
// Schedule update
|
||||||
router.post('/updates/schedule/:containerId', ctx.asyncHandler(async (req, res) => {
|
router.post('/updates/schedule/:containerId', asyncHandler(async (req, res) => {
|
||||||
const { scheduledTime } = req.body;
|
const { scheduledTime } = req.body;
|
||||||
if (!scheduledTime) {
|
if (!scheduledTime) {
|
||||||
throw new ValidationError('scheduledTime is required');
|
throw new ValidationError('scheduledTime is required');
|
||||||
}
|
}
|
||||||
ctx.updateManager.scheduleUpdate(req.params.containerId, scheduledTime);
|
updateManager.scheduleUpdate(req.params.containerId, scheduledTime);
|
||||||
res.json({ success: true, message: 'Update scheduled', scheduledTime });
|
res.json({ success: true, message: 'Update scheduled', scheduledTime });
|
||||||
}, 'updates-schedule'));
|
}, 'updates-schedule'));
|
||||||
|
|
||||||
// ===== DASHCADDY SELF-UPDATE ENDPOINTS =====
|
// ===== DASHCADDY SELF-UPDATE ENDPOINTS =====
|
||||||
|
|
||||||
// Get current version
|
// Get current version
|
||||||
router.get('/system/version', ctx.asyncHandler(async (req, res) => {
|
router.get('/system/version', asyncHandler(async (req, res) => {
|
||||||
const local = ctx.selfUpdater.getLocalVersion();
|
const local = selfUpdater.getLocalVersion();
|
||||||
res.json({ success: true, name: 'DashCaddy', version: local.version, commit: local.commit });
|
res.json({ success: true, name: 'DashCaddy', version: local.version, commit: local.commit });
|
||||||
}, 'system-version'));
|
}, 'system-version'));
|
||||||
|
|
||||||
// Check for DashCaddy update
|
// Check for DashCaddy update
|
||||||
router.get('/system/update-check', ctx.asyncHandler(async (req, res) => {
|
router.get('/system/update-check', asyncHandler(async (req, res) => {
|
||||||
const result = await ctx.selfUpdater.checkForUpdate();
|
const result = await selfUpdater.checkForUpdate();
|
||||||
res.json({ success: true, ...result });
|
res.json({ success: true, ...result });
|
||||||
}, 'system-update-check'));
|
}, 'system-update-check'));
|
||||||
|
|
||||||
// Apply available update
|
// Apply available update
|
||||||
router.post('/system/update-apply', ctx.asyncHandler(async (req, res) => {
|
router.post('/system/update-apply', asyncHandler(async (req, res) => {
|
||||||
const check = await ctx.selfUpdater.checkForUpdate();
|
const check = await selfUpdater.checkForUpdate();
|
||||||
if (!check.available) {
|
if (!check.available) {
|
||||||
return res.json({ success: true, message: 'Already up to date' });
|
return res.json({ success: true, message: 'Already up to date' });
|
||||||
}
|
}
|
||||||
// Start async — container may restart
|
// Start async — container may restart
|
||||||
ctx.selfUpdater.applyUpdate(check.remote).catch(err => {
|
selfUpdater.applyUpdate(check.remote).catch(err => {
|
||||||
ctx.logError('self-update', err);
|
logError('self-update', err);
|
||||||
});
|
});
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@@ -93,33 +102,33 @@ module.exports = function(ctx) {
|
|||||||
}, 'system-update-apply'));
|
}, 'system-update-apply'));
|
||||||
|
|
||||||
// Get update status
|
// Get update status
|
||||||
router.get('/system/update-status', ctx.asyncHandler(async (req, res) => {
|
router.get('/system/update-status', asyncHandler(async (req, res) => {
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
status: ctx.selfUpdater.getStatus(),
|
status: selfUpdater.getStatus(),
|
||||||
lastCheck: ctx.selfUpdater.lastCheckTime,
|
lastCheck: selfUpdater.lastCheckTime,
|
||||||
lastResult: ctx.selfUpdater.lastCheckResult,
|
lastResult: selfUpdater.lastCheckResult,
|
||||||
});
|
});
|
||||||
}, 'system-update-status'));
|
}, 'system-update-status'));
|
||||||
|
|
||||||
// Get self-update history
|
// Get self-update history
|
||||||
router.get('/system/update-history', ctx.asyncHandler(async (req, res) => {
|
router.get('/system/update-history', asyncHandler(async (req, res) => {
|
||||||
const history = ctx.selfUpdater.getUpdateHistory();
|
const history = selfUpdater.getUpdateHistory();
|
||||||
res.json({ success: true, history });
|
res.json({ success: true, history });
|
||||||
}, 'system-update-history'));
|
}, 'system-update-history'));
|
||||||
|
|
||||||
// List rollback versions
|
// List rollback versions
|
||||||
router.get('/system/rollback-versions', ctx.asyncHandler(async (req, res) => {
|
router.get('/system/rollback-versions', asyncHandler(async (req, res) => {
|
||||||
const versions = ctx.selfUpdater.getAvailableRollbacks();
|
const versions = selfUpdater.getAvailableRollbacks();
|
||||||
res.json({ success: true, versions });
|
res.json({ success: true, versions });
|
||||||
}, 'system-rollback-versions'));
|
}, 'system-rollback-versions'));
|
||||||
|
|
||||||
// Rollback to a previous version
|
// Rollback to a previous version
|
||||||
router.post('/system/rollback', ctx.asyncHandler(async (req, res) => {
|
router.post('/system/rollback', asyncHandler(async (req, res) => {
|
||||||
const { version } = req.body;
|
const { version } = req.body;
|
||||||
if (!version) throw new ValidationError('version is required');
|
if (!version) throw new ValidationError('version is required');
|
||||||
ctx.selfUpdater.rollbackToVersion(version).catch(err => {
|
selfUpdater.rollbackToVersion(version).catch(err => {
|
||||||
ctx.logError('self-rollback', err);
|
logError('self-rollback', err);
|
||||||
});
|
});
|
||||||
res.json({ success: true, message: `Rollback to ${version} initiated` });
|
res.json({ success: true, message: `Rollback to ${version} initiated` });
|
||||||
}, 'system-rollback'));
|
}, 'system-rollback'));
|
||||||
|
|||||||
1997
dashcaddy-api/server-old.js
Normal file
1997
dashcaddy-api/server-old.js
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
575
dashcaddy-api/src/app.js
Normal file
575
dashcaddy-api/src/app.js
Normal file
@@ -0,0 +1,575 @@
|
|||||||
|
/**
|
||||||
|
* Express application setup
|
||||||
|
* Configures middleware, assembles context, and mounts routes
|
||||||
|
*/
|
||||||
|
const express = require('express');
|
||||||
|
const https = require('https');
|
||||||
|
const fs = require('fs');
|
||||||
|
|
||||||
|
// Configuration
|
||||||
|
const config = require('./config');
|
||||||
|
const { assembleContext } = require('./context');
|
||||||
|
const { createLogger, logError, safeErrorMessage } = require('./utils/logging');
|
||||||
|
const { fetchT } = require('./utils/http');
|
||||||
|
const { errorResponse, ok } = require('./utils/responses');
|
||||||
|
const { asyncHandler } = require('./utils/async-handler');
|
||||||
|
|
||||||
|
// Managers and utilities
|
||||||
|
const StateManager = require('../state-manager');
|
||||||
|
const { LicenseManager } = require('../license-manager');
|
||||||
|
const credentialManager = require('../credential-manager');
|
||||||
|
const authManager = require('../auth-manager');
|
||||||
|
const dockerSecurity = require('../docker-security');
|
||||||
|
const auditLogger = require('../audit-logger');
|
||||||
|
const portLockManager = require('../port-lock-manager');
|
||||||
|
const resourceMonitor = require('../resource-monitor');
|
||||||
|
const backupManager = require('../backup-manager');
|
||||||
|
const healthChecker = require('../health-checker');
|
||||||
|
const updateManager = require('../update-manager');
|
||||||
|
const selfUpdater = require('../self-updater');
|
||||||
|
const configureMiddleware = require('../middleware');
|
||||||
|
const { validateStartupConfig, syncHealthCheckerServices } = require('../startup-validator');
|
||||||
|
const { CSRF_HEADER_NAME } = require('../csrf-protection');
|
||||||
|
const { resolveServiceUrl } = require('../url-resolver');
|
||||||
|
const metrics = require('../metrics');
|
||||||
|
const { validateURL } = require('../input-validator');
|
||||||
|
|
||||||
|
// Optional modules
|
||||||
|
let dockerMaintenance, logDigest;
|
||||||
|
try { dockerMaintenance = require('../docker-maintenance'); } catch (_) {}
|
||||||
|
try { logDigest = require('../log-digest'); } catch (_) {}
|
||||||
|
|
||||||
|
// Templates
|
||||||
|
const { APP_TEMPLATES, TEMPLATE_CATEGORIES, DIFFICULTY_LEVELS } = require('../app-templates');
|
||||||
|
const { RECIPE_TEMPLATES, RECIPE_CATEGORIES } = require('../recipe-templates');
|
||||||
|
|
||||||
|
// Route modules
|
||||||
|
const healthRoutes = require('../routes/health');
|
||||||
|
const monitoringRoutes = require('../routes/monitoring');
|
||||||
|
const updatesRoutes = require('../routes/updates');
|
||||||
|
const authRoutes = require('../routes/auth');
|
||||||
|
const configRoutes = require('../routes/config');
|
||||||
|
const dnsRoutes = require('../routes/dns');
|
||||||
|
const notificationRoutes = require('../routes/notifications');
|
||||||
|
const containerRoutes = require('../routes/containers');
|
||||||
|
const serviceRoutes = require('../routes/services');
|
||||||
|
const tailscaleRoutes = require('../routes/tailscale');
|
||||||
|
const sitesRoutes = require('../routes/sites');
|
||||||
|
const credentialsRoutes = require('../routes/credentials');
|
||||||
|
const arrRoutes = require('../routes/arr');
|
||||||
|
const appsRoutes = require('../routes/apps');
|
||||||
|
const logsRoutes = require('../routes/logs');
|
||||||
|
const backupsRoutes = require('../routes/backups');
|
||||||
|
const caRoutes = require('../routes/ca');
|
||||||
|
const browseRoutes = require('../routes/browse');
|
||||||
|
const errorLogsRoutes = require('../routes/errorlogs');
|
||||||
|
const licenseRoutes = require('../routes/license');
|
||||||
|
const recipesRoutes = require('../routes/recipes');
|
||||||
|
const themesRoutes = require('../routes/themes');
|
||||||
|
|
||||||
|
// Constants
|
||||||
|
const { APP } = require('../constants');
|
||||||
|
|
||||||
|
/**
 * Create and configure the Express application
 */
async function createApp() {
  const app = express();

  // Initialize logging
  const log = createLogger(config.LOG_LEVEL);

  // Load site configuration (mutates config.siteConfig in place)
  config.loadSiteConfig(config.CONFIG_FILE, log);

  // Create state managers (serialize reads/writes of the JSON state files)
  const servicesStateManager = new StateManager(config.SERVICES_FILE);
  const configStateManager = new StateManager(config.CONFIG_FILE);

  // Initialize license manager
  const licenseManager = new LicenseManager(credentialManager, config.CONFIG_FILE, console);
  licenseManager.loadSecret(config.LICENSE_SECRET_FILE);

  // HTTPS agent for internal CA: trust the system roots PLUS our own root cert
  // so outbound calls to Caddy-issued certs verify. Falls back to a plain agent
  // (system roots only) when the CA file is missing.
  const CA_CERT_PATH = process.env.CA_CERT_PATH || '/app/pki/root.crt';
  let httpsAgent;
  try {
    const caCert = fs.readFileSync(CA_CERT_PATH);
    httpsAgent = new https.Agent({ ca: [...require('tls').rootCertificates, caCert] });
    log.info('server', 'HTTPS agent configured with CA certificate', { path: CA_CERT_PATH });
  } catch {
    httpsAgent = new https.Agent();
    log.warn('server', 'CA cert not found — HTTPS calls may fail', { path: CA_CERT_PATH });
  }

  // TOTP configuration (in-memory defaults; persisted config loaded elsewhere)
  let totpConfig = {
    enabled: false,
    sessionDuration: 'never',
    isSetUp: false
  };

  // Tailscale configuration (in-memory defaults)
  let tailscaleConfig = {
    enabled: false,
    requireAuth: false,
    allowedTailnet: null,
    devices: [],
    oauthConfigured: false,
    tailnet: null,
    syncInterval: 300,  // seconds between device syncs
    lastSync: null
  };
|
||||||
|
|
||||||
|
// Helper functions needed by middleware
|
||||||
|
function isValidContainerId(id) {
|
||||||
|
const CONTAINER_ID_RE = /^[a-zA-Z0-9][a-zA-Z0-9_.\-]{0,127}$/;
|
||||||
|
return typeof id === 'string' && CONTAINER_ID_RE.test(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
function isTailscaleIP(ip) {
|
||||||
|
if (!ip) return false;
|
||||||
|
const parts = ip.split('.');
|
||||||
|
if (parts.length !== 4) return false;
|
||||||
|
const first = parseInt(parts[0]);
|
||||||
|
const second = parseInt(parts[1]);
|
||||||
|
return first === 100 && second >= 64 && second <= 127;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getTailscaleStatus() {
|
||||||
|
// Stub for now - will be populated by context
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
  // Configure middleware: security headers, auth, rate limiting, metrics,
  // caching — all wired through a single configureMiddleware() entry point.
  const middlewareResult = configureMiddleware(app, {
    siteConfig: config.siteConfig,
    totpConfig,
    tailscaleConfig,
    metrics,
    auditLogger,
    authManager,
    log,
    cryptoUtils: require('../crypto-utils'),
    isValidContainerId,
    isTailscaleIP,
    getTailscaleStatus,
    RATE_LIMITS: require('../constants').RATE_LIMITS,
    LIMITS: require('../constants').LIMITS,
    APP: require('../constants').APP,
    CACHE_CONFIGS: require('../cache-config').CACHE_CONFIGS,
    createCache: require('../cache-config').createCache,
  });

  // The strict rate limiter is reused by sensitive routes below.
  const { strictLimiter } = middlewareResult;
|
||||||
|
|
||||||
|
// Helper functions
|
||||||
|
async function getServiceById(serviceId) {
|
||||||
|
const services = await servicesStateManager.read();
|
||||||
|
return services.find(s => s.id === serviceId) || null;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function readConfig() {
|
||||||
|
const { readJsonFile } = require('../fs-helpers');
|
||||||
|
return readJsonFile(config.CONFIG_FILE, {});
|
||||||
|
}
|
||||||
|
|
||||||
|
async function saveConfig(updates) {
|
||||||
|
return await configStateManager.update(cfg => Object.assign(cfg, updates));
|
||||||
|
}
|
||||||
|
|
||||||
|
async function addServiceToConfig(service) {
|
||||||
|
await servicesStateManager.update(services => {
|
||||||
|
const existingIndex = services.findIndex(s => s.id === service.id);
|
||||||
|
if (existingIndex >= 0) {
|
||||||
|
services[existingIndex] = { ...services[existingIndex], ...service };
|
||||||
|
} else {
|
||||||
|
services.push(service);
|
||||||
|
}
|
||||||
|
return services;
|
||||||
|
});
|
||||||
|
log.info('deploy', 'Service added to config', { serviceId: service.id });
|
||||||
|
}
|
||||||
|
|
||||||
|
async function saveTotpConfig() {
|
||||||
|
// Stub - will be implemented
|
||||||
|
}
|
||||||
|
|
||||||
|
async function loadNotificationConfig() {
|
||||||
|
// Stub - will be implemented
|
||||||
|
}
|
||||||
|
|
||||||
|
async function resyncHealthChecker() {
|
||||||
|
return syncHealthCheckerServices({
|
||||||
|
log,
|
||||||
|
SERVICES_FILE: config.SERVICES_FILE,
|
||||||
|
servicesStateManager,
|
||||||
|
healthChecker,
|
||||||
|
buildServiceUrl: config.buildServiceUrl,
|
||||||
|
siteConfig: config.siteConfig,
|
||||||
|
APP
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
  // Create bound logError function (pre-binds file path / size cap / logger)
  const boundLogError = (context, error, additionalInfo) =>
    logError(config.ERROR_LOG_FILE, config.MAX_ERROR_LOG_SIZE, context, error, additionalInfo, log);

  // Create bound asyncHandler: wraps async route handlers so rejections are
  // logged via boundLogError instead of crashing the request.
  const boundAsyncHandler = (fn, context) => asyncHandler(boundLogError, fn, context);

  // Assemble context: the single dependency bag handed to every route module.
  const ctx = assembleContext({
    // Config
    siteConfig: config.siteConfig,
    buildDomain: config.buildDomain,
    buildServiceUrl: config.buildServiceUrl,
    SERVICES_FILE: config.SERVICES_FILE,
    CONFIG_FILE: config.CONFIG_FILE,
    TOTP_CONFIG_FILE: config.TOTP_CONFIG_FILE,
    TAILSCALE_CONFIG_FILE: config.TAILSCALE_CONFIG_FILE,
    NOTIFICATIONS_FILE: config.NOTIFICATIONS_FILE,
    ERROR_LOG_FILE: config.ERROR_LOG_FILE,
    DNS_CREDENTIALS_FILE: config.DNS_CREDENTIALS_FILE,
    CADDYFILE_PATH: config.CADDYFILE_PATH,
    CADDY_ADMIN_URL: config.CADDY_ADMIN_URL,

    // State managers
    servicesStateManager,
    configStateManager,

    // Managers (dockerMaintenance / logDigest may be undefined — optional modules)
    credentialManager,
    authManager,
    licenseManager,
    healthChecker,
    updateManager,
    backupManager,
    resourceMonitor,
    auditLogger,
    portLockManager,
    selfUpdater,
    dockerMaintenance,
    logDigest,
    dockerSecurity,

    // Templates
    APP_TEMPLATES,
    TEMPLATE_CATEGORIES,
    DIFFICULTY_LEVELS,
    RECIPE_TEMPLATES,
    RECIPE_CATEGORIES,

    // Helpers
    asyncHandler: boundAsyncHandler,
    errorResponse,
    ok,
    fetchT,
    httpsAgent,
    log,
    logError: boundLogError,
    safeErrorMessage,
    getServiceById,
    readConfig,
    saveConfig,
    addServiceToConfig,
    validateURL,
    strictLimiter,
    totpConfig,
    saveTotpConfig,
    loadSiteConfig: () => config.loadSiteConfig(config.CONFIG_FILE, log),
    loadNotificationConfig,
    resyncHealthChecker,

    // Middleware result
    middlewareResult,

    // App
    app,
  });
|
||||||
|
|
||||||
|
  // Build versioned API router
  const apiRouter = express.Router();

  // Mount route modules. Some factories take the whole ctx; others take an
  // explicit subset so their dependencies are visible at the call site.
  apiRouter.use(authRoutes(ctx));
  apiRouter.use(configRoutes(ctx));
  apiRouter.use('/dns', dnsRoutes({
    dns: ctx.dns,
    siteConfig: ctx.siteConfig,
    asyncHandler: ctx.asyncHandler,
    log: ctx.log,
    safeErrorMessage: ctx.safeErrorMessage,
    fetchT: ctx.fetchT,
    credentialManager: ctx.credentialManager
  }));
  apiRouter.use('/notifications', notificationRoutes({
    notification: ctx.notification,
    asyncHandler: ctx.asyncHandler
  }));
  apiRouter.use('/containers', containerRoutes({
    docker: ctx.docker,
    log: ctx.log,
    asyncHandler: ctx.asyncHandler
  }));
  apiRouter.use(serviceRoutes({
    servicesStateManager: ctx.servicesStateManager,
    credentialManager: ctx.credentialManager,
    siteConfig: ctx.siteConfig,
    buildServiceUrl: ctx.buildServiceUrl,
    buildDomain: ctx.buildDomain,
    fetchT: ctx.fetchT,
    asyncHandler: ctx.asyncHandler,
    SERVICES_FILE: ctx.SERVICES_FILE,
    log: ctx.log,
    safeErrorMessage: ctx.safeErrorMessage,
    resyncHealthChecker: ctx.resyncHealthChecker,
    caddy: ctx.caddy,
    dns: ctx.dns
  }));
  apiRouter.use(healthRoutes({
    fetchT: ctx.fetchT,
    SERVICES_FILE: ctx.SERVICES_FILE,
    servicesStateManager: ctx.servicesStateManager,
    siteConfig: ctx.siteConfig,
    buildServiceUrl: ctx.buildServiceUrl,
    asyncHandler: ctx.asyncHandler,
    logError: ctx.logError,
    healthChecker: ctx.healthChecker
  }));
  apiRouter.use(monitoringRoutes({
    resourceMonitor: ctx.resourceMonitor,
    docker: ctx.docker,
    asyncHandler: ctx.asyncHandler
  }));
  apiRouter.use(updatesRoutes({
    updateManager: ctx.updateManager,
    selfUpdater: ctx.selfUpdater,
    asyncHandler: ctx.asyncHandler,
    logError: ctx.logError
  }));
  apiRouter.use('/tailscale', tailscaleRoutes({
    tailscale: ctx.tailscale,
    caddy: ctx.caddy,
    servicesStateManager: ctx.servicesStateManager,
    credentialManager: ctx.credentialManager,
    buildDomain: ctx.buildDomain,
    asyncHandler: ctx.asyncHandler,
    SERVICES_FILE: ctx.SERVICES_FILE,
    log: ctx.log
  }));
  apiRouter.use(sitesRoutes({
    asyncHandler: ctx.asyncHandler,
    caddy: ctx.caddy,
    dns: ctx.dns,
    fetchT: ctx.fetchT,
    buildDomain: ctx.buildDomain,
    addServiceToConfig: ctx.addServiceToConfig,
    siteConfig: ctx.siteConfig,
    log: ctx.log
  }));
  apiRouter.use(credentialsRoutes({
    credentialManager: ctx.credentialManager,
    asyncHandler: ctx.asyncHandler
  }));
  apiRouter.use(arrRoutes(ctx));
  apiRouter.use(appsRoutes(ctx));
  apiRouter.use(logsRoutes({
    asyncHandler: ctx.asyncHandler,
    docker: ctx.docker,
    logDigest: ctx.logDigest,
    dockerMaintenance: ctx.dockerMaintenance
  }));
  apiRouter.use(backupsRoutes({
    backupManager: ctx.backupManager,
    asyncHandler: ctx.asyncHandler
  }));
  apiRouter.use('/ca', caRoutes(ctx));
  apiRouter.use(browseRoutes({
    asyncHandler: ctx.asyncHandler,
    validateSecurePath: ctx.validateSecurePath,
    auditLogger: ctx.auditLogger,
    docker: ctx.docker
  }));
  apiRouter.use(errorLogsRoutes({
    ERROR_LOG_FILE: ctx.ERROR_LOG_FILE,
    auditLogger: ctx.auditLogger,
    asyncHandler: ctx.asyncHandler
  }));
  apiRouter.use('/license', licenseRoutes({
    licenseManager: ctx.licenseManager,
    asyncHandler: ctx.asyncHandler
  }));
  apiRouter.use('/recipes', recipesRoutes(ctx));
  apiRouter.use(themesRoutes({ asyncHandler: ctx.asyncHandler }));

  // Inline API routes
  apiRouter.get('/health', (req, res) => {
    res.json({ status: 'ok', timestamp: new Date().toISOString() });
  });

  // CSRF token handed to SPA clients; req.csrfToken is set by the CSRF middleware.
  apiRouter.get('/csrf-token', (req, res) => {
    res.json({ success: true, token: req.csrfToken, headerName: CSRF_HEADER_NAME });
  });

  apiRouter.get('/metrics', (req, res) => {
    res.json({ success: true, metrics: metrics.getSummary() });
  });

  // Mount at /api/v1 (canonical) and /api (legacy)
  app.use('/api/v1', apiRouter);
  app.use('/api', apiRouter);

  // Root-level health check
  app.get('/health', (req, res) => {
    res.json({ status: 'ok', timestamp: new Date().toISOString() });
  });
|
||||||
|
|
||||||
|
  // Lightweight probe endpoint: reports a service's upstream HTTP status code
  // without proxying the body.
  app.get('/probe/:id', boundAsyncHandler(async (req, res) => {
    const id = req.params.id;
    const { exists } = require('../fs-helpers');

    // 'internet' is a synthetic target; everything else is looked up in services.json.
    let service = null;
    if (id !== 'internet' && await exists(config.SERVICES_FILE)) {
      const data = await servicesStateManager.read();
      const services = Array.isArray(data) ? data : data.services || [];
      service = services.find(s => s.id === id);
    }

    const url = resolveServiceUrl(id, service, config.siteConfig, config.buildServiceUrl);
    const parsed = new URL(url);
    const isHttps = parsed.protocol === 'https:';
    const lib = isHttps ? https : require('http');

    const options = {
      hostname: parsed.hostname,
      port: parsed.port || (isHttps ? 443 : 80),
      path: parsed.pathname + parsed.search,
      method: 'HEAD',
      timeout: 5000,
      // Our CA-aware agent is only relevant for HTTPS targets.
      agent: isHttps ? httpsAgent : undefined,
      headers: { 'User-Agent': APP.USER_AGENTS.PROBE },
    };

    const makeRequest = (method) => new Promise((resolve, reject) => {
      const reqOpts = { ...options, method };
      const probeReq = lib.request(reqOpts, (response) => {
        response.resume();  // drain the body so the socket is released
        resolve(response.statusCode);
      });
      probeReq.on('error', reject);
      probeReq.on('timeout', () => { probeReq.destroy(); reject(new Error('Timeout')); });
      probeReq.end();
    });

    let statusCode;
    try {
      statusCode = await makeRequest('HEAD');
      // Some upstreams reject HEAD (405/501) — retry once with GET.
      if (statusCode === 501 || statusCode === 405) {
        statusCode = await makeRequest('GET');
      }
    } catch {
      // Direct probe failed — fall back to probing through the public domain.
      const fallbackUrl = `https://${config.buildDomain(id)}`;
      const fp = new URL(fallbackUrl);
      statusCode = await new Promise((resolve, reject) => {
        const fReq = https.request({
          hostname: fp.hostname,
          port: 443,
          path: '/',
          method: 'GET',
          timeout: 5000,
          agent: httpsAgent,
          headers: { 'User-Agent': APP.USER_AGENTS.PROBE }
        }, (fRes) => {
          fRes.resume();
          resolve(fRes.statusCode);
        });
        fReq.on('error', reject);
        fReq.on('timeout', () => { fReq.destroy(); reject(new Error('Timeout')); });
        fReq.end();
      });
    }

    // Mirror the upstream status with an empty body.
    res.status(statusCode).send();
  }, 'probe'));
|
||||||
|
|
||||||
|
  // Network IPs endpoint: report the host's localhost / LAN / Tailscale IPv4
  // addresses. Env overrides win; otherwise interfaces are scanned.
  app.get('/api/network/ips', (req, res) => {
    try {
      const os = require('os');
      const envLan = process.env.HOST_LAN_IP;
      const envTailscale = process.env.HOST_TAILSCALE_IP;

      const result = {
        localhost: '127.0.0.1',
        lan: envLan || null,
        tailscale: envTailscale || null,
        all: []
      };

      // Only scan when at least one address was not supplied via env.
      if (!envLan || !envTailscale) {
        const interfaces = os.networkInterfaces();
        for (const [name, addrs] of Object.entries(interfaces)) {
          for (const addr of addrs) {
            if (addr.internal || addr.family !== 'IPv4') continue;
            const ip = addr.address;
            result.all.push({ name, ip });

            // First 100.x address is assumed to be Tailscale; first RFC1918
            // address is assumed to be the LAN.
            if (!result.tailscale && ip.startsWith('100.')) {
              result.tailscale = ip;
            } else if (!result.lan && (ip.startsWith('192.168.') || ip.startsWith('10.') || ip.match(/^172\.(1[6-9]|2[0-9]|3[0-1])\./))) {
              result.lan = ip;
            }
          }
        }
      }

      res.json(result);
    } catch (error) {
      errorResponse(res, 500, safeErrorMessage(error));
    }
  });
|
||||||
|
|
||||||
|
  // API Documentation: Swagger UI shell loaded from unpkg, pointed at our spec.
  app.get('/api/docs', (req, res) => {
    // Relax CSP just for this page so the CDN-hosted Swagger assets load.
    res.setHeader('Content-Security-Policy', "default-src 'self'; script-src 'self' 'unsafe-inline' https://unpkg.com; style-src 'self' 'unsafe-inline' https://unpkg.com; img-src 'self' data: https:; connect-src 'self'; font-src 'self' data: https://unpkg.com;");
    res.send(`<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8"/>
<title>DashCaddy API Documentation</title>
<link rel="stylesheet" href="https://unpkg.com/swagger-ui-dist@5/swagger-ui.css"/>
<style>body{margin:0} .swagger-ui .topbar{display:none}</style>
</head>
<body>
<div id="swagger-ui"></div>
<script src="https://unpkg.com/swagger-ui-dist@5/swagger-ui-bundle.js"></script>
<script>SwaggerUIBundle({url:'/api/docs/spec',dom_id:'#swagger-ui',deepLinking:true})</script>
</body>
</html>`);
  });

  // Serve the raw OpenAPI YAML consumed by the Swagger UI above.
  app.get('/api/docs/spec', boundAsyncHandler(async (req, res) => {
    const path = require('path');
    const { exists } = require('../fs-helpers');
    const fsp = require('fs').promises;

    const specPath = path.join(__dirname, '../openapi.yaml');
    if (await exists(specPath)) {
      const yaml = await fsp.readFile(specPath, 'utf8');
      res.type('text/yaml').send(yaml);
    } else {
      errorResponse(res, 404, 'OpenAPI spec not found');
    }
  }, 'api-docs-spec'));
|
||||||
|
|
||||||
|
  // Error handlers (MUST be last): the /api 404 catcher, then the global
  // error middleware that formats uncaught route errors.
  const { notFoundHandler, errorMiddleware } = require('../error-handler');
  app.use('/api', notFoundHandler);
  app.use(errorMiddleware);

  return { app, log, config: config.siteConfig, licenseManager };
}

module.exports = { createApp };
|
||||||
38
dashcaddy-api/src/config/index.js
Normal file
38
dashcaddy-api/src/config/index.js
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
/**
|
||||||
|
* Centralized configuration module
|
||||||
|
* Exports all configuration loading and path resolution
|
||||||
|
*/
|
||||||
|
const paths = require('./paths');
|
||||||
|
const site = require('./site');
|
||||||
|
const { APP, LIMITS, TIMEOUTS, RETRIES, CADDY } = require('../../constants');
|
||||||
|
|
||||||
|
// Load logging level
|
||||||
|
const LOG_LEVELS = { debug: 0, info: 1, warn: 2, error: 3 };
|
||||||
|
const LOG_LEVEL = LOG_LEVELS[process.env.LOG_LEVEL || 'info'] || 1;
|
||||||
|
|
||||||
|
const PORT = APP.PORT;
|
||||||
|
const MAX_ERROR_LOG_SIZE = LIMITS.ERROR_LOG_SIZE;
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
// Paths
|
||||||
|
...paths,
|
||||||
|
|
||||||
|
// Site configuration
|
||||||
|
siteConfig: site.siteConfig,
|
||||||
|
loadSiteConfig: site.loadSiteConfig,
|
||||||
|
buildDomain: site.buildDomain,
|
||||||
|
buildServiceUrl: site.buildServiceUrl,
|
||||||
|
|
||||||
|
// App constants
|
||||||
|
PORT,
|
||||||
|
LOG_LEVELS,
|
||||||
|
LOG_LEVEL,
|
||||||
|
MAX_ERROR_LOG_SIZE,
|
||||||
|
|
||||||
|
// Re-export constants for convenience
|
||||||
|
APP,
|
||||||
|
LIMITS,
|
||||||
|
TIMEOUTS,
|
||||||
|
RETRIES,
|
||||||
|
CADDY,
|
||||||
|
};
|
||||||
42
dashcaddy-api/src/config/paths.js
Normal file
42
dashcaddy-api/src/config/paths.js
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
/**
|
||||||
|
* Platform-specific paths and environment variable configuration
|
||||||
|
*/
|
||||||
|
const path = require('path');
|
||||||
|
const platformPaths = require('../../platform-paths');
|
||||||
|
|
||||||
|
const CADDYFILE_PATH = process.env.CADDYFILE_PATH || platformPaths.caddyfile;
|
||||||
|
const CADDY_ADMIN_URL = process.env.CADDY_ADMIN_URL || platformPaths.caddyAdminUrl;
|
||||||
|
const SERVICES_FILE = process.env.SERVICES_FILE || platformPaths.servicesFile;
|
||||||
|
const SERVICES_DIR = path.dirname(SERVICES_FILE);
|
||||||
|
const CONFIG_FILE = process.env.CONFIG_FILE || path.join(SERVICES_DIR, 'config.json');
|
||||||
|
const DNS_CREDENTIALS_FILE = process.env.DNS_CREDENTIALS_FILE || path.join(SERVICES_DIR, 'dns-credentials.json');
|
||||||
|
const TAILSCALE_CONFIG_FILE = process.env.TAILSCALE_CONFIG_FILE || path.join(SERVICES_DIR, 'tailscale-config.json');
|
||||||
|
const NOTIFICATIONS_FILE = process.env.NOTIFICATIONS_FILE || path.join(SERVICES_DIR, 'notifications.json');
|
||||||
|
const TOTP_CONFIG_FILE = process.env.TOTP_CONFIG_FILE || path.join(SERVICES_DIR, 'totp-config.json');
|
||||||
|
const ERROR_LOG_FILE = process.env.ERROR_LOG_FILE || path.join(__dirname, '../../dashcaddy-errors.log');
|
||||||
|
const LICENSE_SECRET_FILE = process.env.LICENSE_SECRET_FILE || path.join(__dirname, '../../.license-secret');
|
||||||
|
|
||||||
|
const BROWSE_ROOTS = (process.env.MEDIA_BROWSE_ROOTS || '')
|
||||||
|
.split(',')
|
||||||
|
.filter(r => r.includes('='))
|
||||||
|
.map(r => {
|
||||||
|
const eqIndex = r.indexOf('=');
|
||||||
|
const containerPath = r.slice(0, eqIndex).trim();
|
||||||
|
const hostPath = r.slice(eqIndex + 1).trim();
|
||||||
|
return { containerPath, hostPath };
|
||||||
|
});
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
CADDYFILE_PATH,
|
||||||
|
CADDY_ADMIN_URL,
|
||||||
|
SERVICES_FILE,
|
||||||
|
SERVICES_DIR,
|
||||||
|
CONFIG_FILE,
|
||||||
|
DNS_CREDENTIALS_FILE,
|
||||||
|
TAILSCALE_CONFIG_FILE,
|
||||||
|
NOTIFICATIONS_FILE,
|
||||||
|
TOTP_CONFIG_FILE,
|
||||||
|
ERROR_LOG_FILE,
|
||||||
|
LICENSE_SECRET_FILE,
|
||||||
|
BROWSE_ROOTS,
|
||||||
|
};
|
||||||
79
dashcaddy-api/src/config/site.js
Normal file
79
dashcaddy-api/src/config/site.js
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
/**
 * Site configuration loader
 * Loads and manages site-wide settings from config.json
 */
const fs = require('fs');
const { validateConfig } = require('../../config-schema');
const { CADDY } = require('../../constants');

// Shared, mutable site settings. Exported by reference: loadSiteConfig()
// mutates this object in place so importers always observe current values.
let siteConfig = {
  tld: '.home',
  caName: '',
  dnsServerIp: '',
  dnsServerPort: CADDY.DEFAULT_DNS_PORT,
  dashboardHost: '',
  timezone: 'UTC',
  dnsServers: {},
  configurationType: 'homelab',  // 'homelab' (internal TLD) or 'public' (real domain)
  domain: '',
  routingMode: 'subdomain'       // 'subdomain' or 'subdirectory'
};
|
||||||
|
|
||||||
|
/**
 * Load config.json into the shared `siteConfig` object.
 * Missing file or any parse/validation error leaves the current values intact
 * (errors are logged, never thrown).
 * @param {string} CONFIG_FILE - Path to config.json.
 * @param {object} [log] - Optional structured logger.
 */
function loadSiteConfig(CONFIG_FILE, log) {
  try {
    if (!fs.existsSync(CONFIG_FILE)) return;
    const raw = JSON.parse(fs.readFileSync(CONFIG_FILE, 'utf8'));

    // Surface (non-fatal) schema problems before applying values.
    const { valid, errors: configErrors, warnings: configWarnings } = validateConfig(raw);
    if (log && log.warn) {
      if (!valid) log.warn('config', 'Config validation errors', { errors: configErrors });
      for (const w of configWarnings) log.warn('config', w);
    }

    // Mutate the shared object field-by-field (it is exported by reference).
    siteConfig.tld = raw.tld || '.home';
    if (!siteConfig.tld.startsWith('.')) siteConfig.tld = '.' + siteConfig.tld;
    siteConfig.caName = raw.caName || '';
    siteConfig.dnsServerIp = (raw.dns && raw.dns.ip) || '';
    siteConfig.dnsServerPort = (raw.dns && raw.dns.port) || CADDY.DEFAULT_DNS_PORT;
    siteConfig.dashboardHost = raw.dashboardHost || `status${siteConfig.tld}`;
    siteConfig.timezone = raw.timezone || 'UTC';
    siteConfig.dnsServers = raw.dnsServers || {};
    siteConfig.configurationType = raw.configurationType || 'homelab';
    siteConfig.domain = raw.domain || '';
    siteConfig.routingMode = raw.routingMode || 'subdomain';
    siteConfig.pylon = raw.pylon || null;
  } catch (e) {
    if (log && log.error) {
      log.error('config', 'Failed to load site config', { error: e.message });
    }
  }
}
|
||||||
|
|
||||||
|
/** Build a domain from subdomain + configured TLD or public domain */
function buildDomain(subdomain) {
  const usePublicDomain = siteConfig.configurationType === 'public' && Boolean(siteConfig.domain);
  return usePublicDomain
    ? `${subdomain}.${siteConfig.domain}`
    : `${subdomain}${siteConfig.tld}`;
}
|
||||||
|
|
||||||
|
/** Build full service URL (protocol + host + path) */
function buildServiceUrl(subdomain) {
  const subdirMode = siteConfig.routingMode === 'subdirectory' && Boolean(siteConfig.domain);
  return subdirMode
    ? `https://${siteConfig.domain}/${subdomain}`
    : `https://${buildDomain(subdomain)}`;
}
|
||||||
|
|
||||||
|
// Note: siteConfig is exported by reference; loadSiteConfig mutates it in place.
module.exports = {
  siteConfig,
  loadSiteConfig,
  buildDomain,
  buildServiceUrl,
};
|
||||||
184
dashcaddy-api/src/context/caddy.js
Normal file
184
dashcaddy-api/src/context/caddy.js
Normal file
@@ -0,0 +1,184 @@
|
|||||||
|
/**
|
||||||
|
* Caddy context - Caddyfile manipulation and reload
|
||||||
|
*/
|
||||||
|
const fsp = require('fs').promises;
|
||||||
|
const { RETRIES } = require('../../constants');
|
||||||
|
|
||||||
|
/**
 * Atomically read-modify-write the Caddyfile and reload Caddy.
 * Uses a mutex to prevent concurrent modifications.
 * Rolls back on reload failure.
 *
 * @param {string} CADDYFILE_PATH - Path to the Caddyfile.
 * @param {(content: string) => Promise<void>} reloadCaddy - Applies new content; throws on failure.
 * @param {(original: string) => string|null|Promise<string|null>} modifyFn -
 *   Returns the new content, or null / the unchanged string to abort.
 * @returns {Promise<{success: boolean, error?: string, rolledBack?: boolean}>}
 */
let _caddyfileLock = Promise.resolve();

async function modifyCaddyfile(CADDYFILE_PATH, reloadCaddy, modifyFn) {
  // Promise-chain mutex: publish our own gate, wait on the previous holder,
  // and open the gate in `finally` so waiters always proceed (even on throw).
  let resolve;
  const prev = _caddyfileLock;
  _caddyfileLock = new Promise(r => { resolve = r; });
  await prev;

  try {
    const original = await fsp.readFile(CADDYFILE_PATH, 'utf8');
    const modified = await modifyFn(original);

    // null or an identical string from modifyFn means "nothing to do".
    if (modified === null || modified === original) {
      return { success: false, error: 'No changes to apply' };
    }

    await fsp.writeFile(CADDYFILE_PATH, modified, 'utf8');

    try {
      await reloadCaddy(modified);
      return { success: true };
    } catch (err) {
      // Rollback so the on-disk file matches the config Caddy is still running.
      await fsp.writeFile(CADDYFILE_PATH, original, 'utf8');
      return { success: false, error: err.message, rolledBack: true };
    }
  } finally {
    resolve();
  }
}
|
||||||
|
|
||||||
|
/**
 * Read the current Caddyfile content as UTF-8 text.
 * @param {string} CADDYFILE_PATH - Absolute path to the Caddyfile.
 * @returns {Promise<string>} File contents.
 */
async function readCaddyfile(CADDYFILE_PATH) {
  const content = await fsp.readFile(CADDYFILE_PATH, 'utf8');
  return content;
}
|
||||||
|
|
||||||
|
/**
 * Reload Caddy via admin API
 *
 * POSTs the Caddyfile text to <admin>/load, retrying on failure.
 * @param {string} CADDY_ADMIN_URL - Base URL of Caddy's admin endpoint.
 * @param {string} content - Caddyfile text to load.
 * @param {Function} fetchT - fetch-with-timeout helper.
 * @param {object} log - Structured logger.
 * @throws {Error} [DC-303] after all retries are exhausted.
 */
async function reloadCaddy(CADDY_ADMIN_URL, content, fetchT, log) {
  const maxRetries = RETRIES.CADDY_RELOAD;
  let lastError = null;

  for (let i = 0; i < maxRetries; i++) {
    try {
      const response = await fetchT(`${CADDY_ADMIN_URL}/load`, {
        method: 'POST',
        headers: { 'Content-Type': 'text/caddyfile' },
        body: content
      });

      if (response.ok) {
        log.info('caddy', 'Caddy configuration reloaded successfully');
        // Brief settle delay so Caddy finishes applying before callers verify.
        await new Promise(resolve => setTimeout(resolve, 1000));
        return;
      }

      lastError = await response.text();
      log.warn('caddy', 'Caddy reload attempt failed', { attempt: i + 1, error: lastError });
    } catch (error) {
      lastError = error.message;
      log.warn('caddy', 'Caddy reload attempt error', { attempt: i + 1, error: lastError });
    }

    // Back off between attempts (skipped after the final one).
    if (i < maxRetries - 1) {
      await new Promise(resolve => setTimeout(resolve, 2000));
    }
  }

  throw new Error(`[DC-303] Caddy reload failed after ${maxRetries} attempts: ${lastError}`);
}
|
||||||
|
|
||||||
|
/**
 * Verify a site is accessible via HTTPS.
 * Polls `https://<domain>/` with HEAD until any response arrives or the
 * attempts run out; any response (regardless of status) counts as accessible.
 *
 * @param {string} domain - Host to probe.
 * @param {Function} fetchT - fetch-with-timeout helper.
 * @param {object} httpsAgent - Agent trusting the internal CA.
 * @param {object} log - Structured logger.
 * @param {number} [maxAttempts=5] - Probe attempts before giving up.
 * @returns {Promise<boolean>} true once reachable, false otherwise.
 */
async function verifySiteAccessible(domain, fetchT, httpsAgent, log, maxAttempts = 5) {
  const delay = 2000;

  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      const response = await fetchT(`https://${domain}/`, {
        method: 'HEAD',
        agent: httpsAgent,
        timeout: 5000
      });

      log.info('caddy', 'Site is accessible', { domain, status: response.status });
      return true;
    } catch (error) {
      log.debug('caddy', 'Site verification attempt', {
        domain,
        attempt,
        maxAttempts,
        error: error.message
      });
    }

    // Pause before the next try (not after the final failure).
    if (attempt < maxAttempts) {
      await new Promise((resolve) => setTimeout(resolve, delay));
    }
  }

  log.warn('caddy', 'Could not verify site accessibility', { domain });
  return false;
}
|
||||||
|
|
||||||
|
/**
 * Generate a Caddy config block for a service.
 * Subdirectory routing mode yields a `handle`/`handle_path` block meant to be
 * nested inside the shared domain's site block; subdomain mode yields a full
 * site block (with `tls internal`). With `tailscaleOnly`, a matcher rejects
 * clients outside 100.64.0.0/10 (plus any explicitly allowed IPs) with a 403.
 *
 * @param {string} subdomain - Service name / path segment.
 * @param {string} ip - Upstream address.
 * @param {number|string} port - Upstream port.
 * @param {object} siteConfig - Site settings (routingMode, domain).
 * @param {(s: string) => string} buildDomain - Subdomain-to-FQDN mapper.
 * @param {{tailscaleOnly?: boolean, allowedIPs?: string[], subpathSupport?: string}} [options]
 * @returns {string} Caddyfile snippet.
 */
function generateCaddyConfig(subdomain, ip, port, siteConfig, buildDomain, options = {}) {
  const { tailscaleOnly = false, allowedIPs = [], subpathSupport = 'strip' } = options;

  const subdirMode = siteConfig.routingMode === 'subdirectory' && Boolean(siteConfig.domain);

  if (subdirMode) {
    const parts = [];

    if (subpathSupport === 'native') {
      // App handles its own base path: keep the prefix, add a canonical redirect.
      parts.push(`\tredir /${subdomain} /${subdomain}/ permanent\n`);
      parts.push(`\thandle /${subdomain}/* {\n`);
    } else {
      // Default: strip the prefix before proxying.
      parts.push(`\thandle_path /${subdomain}/* {\n`);
    }

    if (tailscaleOnly) {
      let matcher = `\t\t@blocked not remote_ip 100.64.0.0/10`;
      if (allowedIPs.length > 0) matcher += ` ${allowedIPs.join(' ')}`;
      parts.push(matcher);
      parts.push(`\n\t\trespond @blocked "Access denied. Tailscale connection required." 403\n`);
    }

    parts.push(`\t\treverse_proxy ${ip}:${port}\n`);
    parts.push(`\t}`);
    return parts.join('');
  }

  // Subdomain mode: standalone site block.
  let block = `${buildDomain(subdomain)} {\n`;

  if (tailscaleOnly) {
    block += ` @blocked not remote_ip 100.64.0.0/10`;
    if (allowedIPs.length > 0) {
      block += ` ${allowedIPs.join(' ')}`;
    }
    block += `\n respond @blocked "Access denied. Tailscale connection required." 403\n`;
  }

  block += ` reverse_proxy ${ip}:${port}\n`;
  block += ` tls internal\n`;
  block += `}`;

  return block;
}
|
||||||
|
|
||||||
|
/**
 * Create the Caddy context: partially-applied wrappers around the
 * Caddyfile/admin-API helpers so route handlers do not have to thread
 * file paths, fetch, agent, and logger through every call.
 */
function createCaddyContext(CADDYFILE_PATH, CADDY_ADMIN_URL, fetchT, httpsAgent, log, siteConfig, buildDomain) {
  // Named so `modify` can reuse it after rewriting the Caddyfile.
  const reload = (content) => reloadCaddy(CADDY_ADMIN_URL, content, fetchT, log);

  return {
    modify: (modifyFn) => modifyCaddyfile(CADDYFILE_PATH, reload, modifyFn),
    read: () => readCaddyfile(CADDYFILE_PATH),
    reload,
    generateConfig: (subdomain, ip, port, options) =>
      generateCaddyConfig(subdomain, ip, port, siteConfig, buildDomain, options),
    verifySite: (domain, maxAttempts) =>
      verifySiteAccessible(domain, fetchT, httpsAgent, log, maxAttempts),
    adminUrl: CADDY_ADMIN_URL,
    filePath: CADDYFILE_PATH,
  };
}
|
||||||
|
|
||||||
|
module.exports = { createCaddyContext };
|
||||||
308
dashcaddy-api/src/context/dns.js
Normal file
308
dashcaddy-api/src/context/dns.js
Normal file
@@ -0,0 +1,308 @@
|
|||||||
|
/**
|
||||||
|
* DNS context - Technitium DNS operations and token management
|
||||||
|
*/
|
||||||
|
const { TIMEOUTS, SESSION_TTL, CADDY } = require('../../constants');
|
||||||
|
const { createCache, CACHE_CONFIGS } = require('../../cache-config');
|
||||||
|
|
||||||
|
// DNS token management
|
||||||
|
let dnsToken = process.env.DNS_ADMIN_TOKEN || '';
|
||||||
|
let dnsTokenExpiry = null;
|
||||||
|
|
||||||
|
// Per-server token cache
|
||||||
|
const dnsServerTokens = createCache(CACHE_CONFIGS.dnsTokens);
|
||||||
|
|
||||||
|
/**
 * Build a full Technitium DNS API URL.
 *
 * Bare IPv4 addresses are treated as plain-HTTP servers on the default DNS
 * admin port; anything else (a hostname) is assumed to be HTTPS with no
 * explicit port.
 *
 * @param {string} server host name or IPv4 address
 * @param {string} apiPath API path beginning with '/'
 * @param {URLSearchParams|object} params query parameters
 * @returns {string} complete request URL
 */
function buildDnsUrl(server, apiPath, params) {
  const isIPv4 = /^\d+\.\d+\.\d+\.\d+$/.test(server);
  const search = (params instanceof URLSearchParams ? params : new URLSearchParams(params)).toString();
  if (isIPv4) {
    return `http://${server}:${CADDY.DEFAULT_DNS_PORT}${apiPath}?${search}`;
  }
  return `https://${server}${apiPath}?${search}`;
}
|
||||||
|
|
||||||
|
/**
 * Call a Technitium DNS API endpoint and return the parsed JSON body.
 * Uses the long HTTP timeout since some DNS admin operations are slow.
 */
async function callDns(server, apiPath, params, fetchT, httpsAgent) {
  const requestOptions = {
    method: 'GET',
    headers: { 'Accept': 'application/json' },
    agent: httpsAgent,
  };
  const response = await fetchT(buildDnsUrl(server, apiPath, params), requestOptions, TIMEOUTS.HTTP_LONG);
  return response.json();
}
|
||||||
|
|
||||||
|
/**
 * Log in to a Technitium server and refresh the module-level DNS token.
 *
 * On success, updates `dnsToken` / `dnsTokenExpiry` and returns
 * { success: true, token }. On failure (bad credentials or network error)
 * returns { success: false, error } — never throws.
 *
 * NOTE(review): the login port 5380 is hardcoded here while
 * getTokenForServer honors siteConfig.dnsServerPort — confirm intentional.
 */
async function refreshDnsToken(username, password, server, fetchT, log) {
  try {
    const loginQuery = new URLSearchParams({
      user: username,
      pass: password,
      includeInfo: 'false'
    });

    const response = await fetchT(
      `http://${server}:5380/api/user/login?${loginQuery.toString()}`,
      {
        method: 'POST',
        headers: {
          'Accept': 'application/json',
          'Content-Type': 'application/x-www-form-urlencoded'
        },
        timeout: 10000
      }
    );
    const result = await response.json();

    // Guard clause: anything other than an ok+token reply is a login failure.
    if (result.status !== 'ok' || !result.token) {
      return { success: false, error: result.errorMessage || 'Login failed' };
    }

    dnsToken = result.token;
    dnsTokenExpiry = new Date(Date.now() + SESSION_TTL.DNS_TOKEN).toISOString();
    log.info('dns', 'DNS token refreshed', { expires: dnsTokenExpiry });
    return { success: true, token: dnsToken };
  } catch (error) {
    log.error('dns', 'DNS token refresh error', { error: error.message });
    return { success: false, error: error.message };
  }
}
|
||||||
|
|
||||||
|
/**
 * Ensure we have a valid DNS token for the primary server, refreshing it
 * automatically if missing or expired.
 *
 * Credential resolution order:
 *   1. cached module-level token (if unexpired)
 *   2. per-server credentials for the primary server (admin, then readonly)
 *   3. global credentials from the credential manager
 *
 * @returns {Promise<{success: boolean, token?: string, error?: string}>}
 */
async function ensureValidDnsToken(siteConfig, credentialManager, fetchT, log) {
  // Fast path: reuse the module-level token while it is still unexpired.
  if (dnsToken && dnsTokenExpiry && new Date() < new Date(dnsTokenExpiry)) {
    return { success: true, token: dnsToken };
  }

  const primaryIp = siteConfig.dnsServerIp;
  if (primaryIp) {
    const dnsId = dnsIpToDnsId(primaryIp, siteConfig);
    if (dnsId) {
      // Prefer per-server credentials; admin is tried before readonly.
      for (const role of ['admin', 'readonly']) {
        try {
          const username = await credentialManager.retrieve(`dns.${dnsId}.${role}.username`);
          const password = await credentialManager.retrieve(`dns.${dnsId}.${role}.password`);
          if (username && password) {
            // First role with stored credentials wins; its login result (even a
            // failed one) is returned without trying further fallbacks.
            return await refreshDnsToken(username, password, primaryIp, fetchT, log);
          }
        } catch (err) {
          // A missing/unreadable credential for one role is logged and the
          // next role is tried.
          log.error('dns', `Per-server ${role} credential error`, { dnsId, error: err.message });
        }
      }
    }
  }

  // Fall back to global credentials
  try {
    const username = await credentialManager.retrieve('dns.username');
    const password = await credentialManager.retrieve('dns.password');
    const server = await credentialManager.retrieve('dns.server');
    if (username && password) {
      // NOTE(review): if both `server` and `primaryIp` are unset, the login
      // URL is built against `undefined` — confirm callers always set one.
      return await refreshDnsToken(username, password, server || primaryIp, fetchT, log);
    }
  } catch (err) {
    log.error('dns', 'Credential manager error', { error: err.message });
  }

  return {
    success: false,
    error: 'No DNS credentials configured. Please set up credentials via /api/dns/credentials'
  };
}
|
||||||
|
|
||||||
|
/**
 * Reverse-map a DNS server IP address to its configured server ID.
 *
 * @param {string} serverIp IP to look up
 * @param {object} siteConfig holds the dnsServers map ({ id: { ip } })
 * @returns {string|null} the matching ID, or null when no server has that IP
 */
function dnsIpToDnsId(serverIp, siteConfig) {
  const servers = siteConfig.dnsServers || {};
  const match = Object.entries(servers).find(([, info]) => info.ip === serverIp);
  return match ? match[0] : null;
}
|
||||||
|
|
||||||
|
/**
 * Get a valid API token for a specific DNS server, using the per-server
 * token cache and falling back through credential sources:
 *   1. cached token for (server, role), if unexpired
 *   2. per-server credentials for the requested role
 *   3. per-server credentials for the opposite role
 *   4. global credentials
 *
 * @param {string} targetServer server IP/hostname to authenticate against
 * @param {string} [role='readonly'] 'readonly' or 'admin'
 * @returns {Promise<{success: boolean, token?: string, error?: string}>}
 */
async function getTokenForServer(targetServer, siteConfig, credentialManager, fetchT, log, role = 'readonly') {
  const cacheKey = `${targetServer}:${role}`;
  const cached = dnsServerTokens.get(cacheKey);

  // Cache hit: reuse the token while it is still unexpired.
  if (cached && cached.token && cached.expiry && new Date() < new Date(cached.expiry)) {
    return { success: true, token: cached.token };
  }

  const serverPort = siteConfig.dnsServerPort || '5380';

  // Shared login helper: authenticates against targetServer and, on success,
  // stores the token in the per-server cache under cacheKey.
  async function authenticateToServer(username, password) {
    const params = new URLSearchParams({
      user: username,
      pass: password,
      includeInfo: 'false'
    });

    const response = await fetchT(
      `http://${targetServer}:${serverPort}/api/user/login?${params.toString()}`,
      {
        method: 'POST',
        headers: {
          'Accept': 'application/json',
          'Content-Type': 'application/x-www-form-urlencoded'
        }
      }
    );

    const result = await response.json();

    if (result.status === 'ok' && result.token) {
      dnsServerTokens.set(cacheKey, {
        token: result.token,
        expiry: new Date(Date.now() + SESSION_TTL.DNS_TOKEN).toISOString()
      });
      log.info('dns', 'DNS token obtained for server', { server: targetServer, role });
      return { success: true, token: result.token };
    }

    return { success: false, error: result.errorMessage || 'Login failed' };
  }

  const dnsId = dnsIpToDnsId(targetServer, siteConfig);

  if (dnsId) {
    // Per-server credentials for the requested role.
    try {
      const username = await credentialManager.retrieve(`dns.${dnsId}.${role}.username`);
      const password = await credentialManager.retrieve(`dns.${dnsId}.${role}.password`);
      if (username && password) {
        return await authenticateToServer(username, password);
      }
    } catch (err) {
      log.error('dns', `Per-server ${role} credential error`, { dnsId, server: targetServer, error: err.message });
    }

    // Opposite role as a best-effort fallback (e.g. readonly missing → admin).
    const fallbackRole = role === 'readonly' ? 'admin' : 'readonly';
    try {
      const username = await credentialManager.retrieve(`dns.${dnsId}.${fallbackRole}.username`);
      const password = await credentialManager.retrieve(`dns.${dnsId}.${fallbackRole}.password`);
      if (username && password) {
        return await authenticateToServer(username, password);
      }
    } catch (err) {
      // ignore
    }
  }

  // Last resort: global credentials.
  try {
    const username = await credentialManager.retrieve('dns.username');
    const password = await credentialManager.retrieve('dns.password');
    if (username && password) {
      return await authenticateToServer(username, password);
    }
  } catch (err) {
    log.error('dns', 'Credential manager error', { server: targetServer, error: err.message });
  }

  return { success: false, error: 'No DNS credentials configured' };
}
|
||||||
|
|
||||||
|
/**
 * Resolve a usable DNS token, preferring one the caller already supplied.
 *
 * @param {string} [providedToken] token passed by the caller, used as-is if set
 * @returns {Promise<string>} a valid token
 * @throws {Error} with statusCode 401 when no token can be obtained
 */
async function requireDnsToken(providedToken, siteConfig, credentialManager, fetchT, log) {
  if (providedToken) {
    return providedToken;
  }

  const { success, token, error } = await ensureValidDnsToken(siteConfig, credentialManager, fetchT, log);
  if (success) {
    return token;
  }

  const failure = new Error(`No valid DNS token available. ${error}`);
  failure.statusCode = 401;
  throw failure;
}
|
||||||
|
|
||||||
|
/**
 * Create (or overwrite) an A record for a service subdomain on the primary
 * Technitium server, refreshing the DNS token once and retrying if the
 * server reports a token problem.
 *
 * FIXES:
 *  - `credentialManager` was referenced as a free variable (not a parameter
 *    and not module-level), so every call threw a ReferenceError; it is now
 *    an explicit trailing parameter (backward-compatible signature).
 *  - After a mid-call token refresh, the retry reused the stale token
 *    captured in `dnsParams`; the token field is now rebuilt before retrying.
 *
 * @param {string} subdomain service subdomain
 * @param {string} ip address the A record should point at
 * @param {object} siteConfig provides tld and dnsServerIp
 * @param {Function} buildDomain maps subdomain → full domain
 * @param {object} credentialManager credential store used for token refresh
 * @returns {Promise<{success: true}>}
 * @throws {Error} when the record cannot be created
 */
async function createDnsRecord(subdomain, ip, siteConfig, buildDomain, fetchT, httpsAgent, log, credentialManager) {
  const tokenResult = await ensureValidDnsToken(siteConfig, credentialManager, fetchT, log);
  if (!tokenResult.success) {
    throw new Error(`DNS token not available: ${tokenResult.error}`);
  }

  const domain = buildDomain(subdomain);
  // Zone name is the TLD without its leading dot.
  const zone = siteConfig.tld.replace(/^\./, '');

  const dnsParams = {
    token: dnsToken,
    domain,
    zone,
    type: 'A',
    ipAddress: ip,
    ttl: '300',
    overwrite: 'true'
  };

  const callDnsApi = () => callDns(siteConfig.dnsServerIp, '/api/zones/records/add', dnsParams, fetchT, httpsAgent);

  try {
    log.info('dns', 'Creating DNS record', { domain, ip });
    const result = await callDnsApi();

    if (result.status === 'ok') {
      log.info('dns', 'DNS record created', { domain, ip });
      return { success: true };
    }

    // A token-related error message means our cached token likely expired:
    // refresh once and retry with the new module-level token.
    if (result.errorMessage && result.errorMessage.toLowerCase().includes('token')) {
      log.info('dns', 'Token appears expired, attempting auto-refresh');
      const refreshResult = await ensureValidDnsToken(siteConfig, credentialManager, fetchT, log);
      if (!refreshResult.success) throw new Error(`Token refresh failed: ${refreshResult.error}`);

      // Rebuild the token field — dnsParams still held the stale token.
      dnsParams.token = dnsToken;
      const retryResult = await callDnsApi();
      if (retryResult.status === 'ok') {
        log.info('dns', 'DNS record created after token refresh', { domain, ip });
        return { success: true };
      }
      throw new Error(retryResult.errorMessage || 'Unknown error after token refresh');
    }

    throw new Error(result.errorMessage || 'Unknown error');
  } catch (error) {
    throw new Error(`Failed to create DNS record for ${domain}: ${error.message}`);
  }
}
|
||||||
|
|
||||||
|
/**
 * Drop any cached tokens (both roles) for one DNS server so the next
 * getTokenForServer call re-authenticates.
 */
function invalidateTokenForServer(serverIp) {
  for (const role of ['readonly', 'admin']) {
    dnsServerTokens.delete(`${serverIp}:${role}`);
  }
}
|
||||||
|
|
||||||
|
/**
 * Create the DNS context: partially-applied wrappers over the Technitium
 * helpers plus accessors for the module-level token state.
 *
 * FIXES:
 *  - `createRecord` now forwards `credentialManager` to createDnsRecord,
 *    which needs it to (re)acquire a DNS token.
 *  - The local binding formerly named `require` shadowed CommonJS `require`
 *    inside this function; renamed to `requireTokenImpl`.
 */
function createDnsContext(siteConfig, buildDomain, credentialManager, fetchT, httpsAgent, log, DNS_CREDENTIALS_FILE) {
  const ensureToken = () => ensureValidDnsToken(siteConfig, credentialManager, fetchT, log);
  const requireTokenImpl = (providedToken) => requireDnsToken(providedToken, siteConfig, credentialManager, fetchT, log);
  const getForServer = (server, role) => getTokenForServer(server, siteConfig, credentialManager, fetchT, log, role);
  const refresh = (username, password, server) => refreshDnsToken(username, password, server, fetchT, log);
  const create = (subdomain, ip) => createDnsRecord(subdomain, ip, siteConfig, buildDomain, fetchT, httpsAgent, log, credentialManager);
  const call = (server, apiPath, params) => callDns(server, apiPath, params, fetchT, httpsAgent);

  return {
    call,
    buildUrl: buildDnsUrl,
    requireToken: requireTokenImpl,
    ensureToken,
    createRecord: create,
    // Accessors for the module-level token state (kept for legacy callers).
    getToken: () => dnsToken,
    setToken: (t) => { dnsToken = t; },
    getTokenExpiry: () => dnsTokenExpiry,
    setTokenExpiry: (e) => { dnsTokenExpiry = e; },
    getTokenForServer: getForServer,
    invalidateTokenForServer,
    refresh,
    credentialsFile: DNS_CREDENTIALS_FILE,
  };
}
|
||||||
|
|
||||||
|
module.exports = { createDnsContext };
|
||||||
67
dashcaddy-api/src/context/docker.js
Normal file
67
dashcaddy-api/src/context/docker.js
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
/**
|
||||||
|
* Docker context - Docker client and operations
|
||||||
|
*/
|
||||||
|
const Docker = require('dockerode');
|
||||||
|
const { DOCKER } = require('../../constants');
|
||||||
|
|
||||||
|
const docker = new Docker();
|
||||||
|
|
||||||
|
/**
 * Pull a Docker image, rejecting if the pull exceeds `timeoutMs`.
 * Adapts dockerode's callback/stream API to a Promise.
 *
 * @param {string} imageName image reference to pull
 * @param {number} [timeoutMs] overall pull timeout
 * @returns {Promise<Array>} followProgress output events
 */
function dockerPull(imageName, timeoutMs = DOCKER.TIMEOUT) {
  return new Promise((resolve, reject) => {
    const onTimeout = () =>
      reject(new Error(`Docker pull timed out after ${timeoutMs / 1000}s: ${imageName}`));
    const timer = setTimeout(onTimeout, timeoutMs);

    docker.pull(imageName, (pullErr, stream) => {
      if (pullErr) {
        clearTimeout(timer);
        reject(pullErr);
        return;
      }
      // Drain the progress stream; its completion callback signals pull end.
      docker.modem.followProgress(stream, (progressErr, output) => {
        clearTimeout(timer);
        if (progressErr) {
          reject(progressErr);
        } else {
          resolve(output);
        }
      });
    });
  });
}
|
||||||
|
|
||||||
|
/**
 * Find a Docker container whose name contains `name` (case-insensitive).
 *
 * @param {string} name substring to match against container names
 * @param {object} [opts] listContainers options; default excludes stopped ones
 * @returns {Promise<object|null>} first matching container summary, or null
 */
async function findContainerByName(name, opts = { all: false }) {
  const needle = name.toLowerCase();
  const containers = await docker.listContainers(opts);
  for (const container of containers) {
    if (container.Names.some((n) => n.toLowerCase().includes(needle))) {
      return container;
    }
  }
  return null;
}
|
||||||
|
|
||||||
|
/**
 * Collect every public host port currently bound by a running container.
 *
 * @returns {Promise<Set<number>>} set of in-use host ports
 */
async function getUsedPorts() {
  const containers = await docker.listContainers({ all: false });
  const publicPorts = containers
    .flatMap((c) => c.Ports || [])
    .map((p) => p.PublicPort)
    .filter((port) => Boolean(port));
  return new Set(publicPorts);
}
|
||||||
|
|
||||||
|
/**
 * Create the Docker context exposed to routes: the raw dockerode client
 * plus the timeout/lookup helpers defined in this module.
 *
 * @param {object} dockerSecurity injected security helper (defined elsewhere)
 */
function createDockerContext(dockerSecurity) {
  return {
    client: docker,                    // raw dockerode instance for advanced use
    pull: dockerPull,                  // timeout-protected image pull
    findContainer: findContainerByName, // case-insensitive name lookup
    getUsedPorts,                      // Set of bound host ports
    security: dockerSecurity,
  };
}
|
||||||
|
|
||||||
|
module.exports = { createDockerContext };
|
||||||
175
dashcaddy-api/src/context/index.js
Normal file
175
dashcaddy-api/src/context/index.js
Normal file
@@ -0,0 +1,175 @@
|
|||||||
|
/**
|
||||||
|
* Context assembly - Dependency injection container
|
||||||
|
* Assembles all context objects needed by routes
|
||||||
|
*/
|
||||||
|
const { createDockerContext } = require('./docker');
|
||||||
|
const { createCaddyContext } = require('./caddy');
|
||||||
|
const { createDnsContext } = require('./dns');
|
||||||
|
const { createSessionContext } = require('./session');
|
||||||
|
|
||||||
|
/**
 * Assemble the full application context.
 * This replaces the old "god object" ctx with explicit construction:
 * every dependency is named in the destructured parameter, the
 * domain-specific sub-contexts (docker/caddy/dns/session) are built here,
 * and a flat ctx object is returned because routes still expect it.
 *
 * NOTE(review): `httpsAgent` is consumed by the caddy/dns sub-contexts but is
 * not re-exported on the flat ctx — confirm no route reads ctx.httpsAgent.
 */
function assembleContext({
  // Config
  siteConfig,
  buildDomain,
  buildServiceUrl,
  SERVICES_FILE,
  CONFIG_FILE,
  TOTP_CONFIG_FILE,
  TAILSCALE_CONFIG_FILE,
  NOTIFICATIONS_FILE,
  ERROR_LOG_FILE,
  DNS_CREDENTIALS_FILE,
  CADDYFILE_PATH,
  CADDY_ADMIN_URL,

  // State managers
  servicesStateManager,
  configStateManager,

  // Managers
  credentialManager,
  authManager,
  licenseManager,
  healthChecker,
  updateManager,
  backupManager,
  resourceMonitor,
  auditLogger,
  portLockManager,
  selfUpdater,
  dockerMaintenance,
  logDigest,
  dockerSecurity,

  // Templates
  APP_TEMPLATES,
  TEMPLATE_CATEGORIES,
  DIFFICULTY_LEVELS,
  RECIPE_TEMPLATES,
  RECIPE_CATEGORIES,

  // Helpers
  asyncHandler,
  errorResponse,
  ok,
  fetchT,
  httpsAgent,
  log,
  logError,
  safeErrorMessage,
  getServiceById,
  readConfig,
  saveConfig,
  addServiceToConfig,
  validateURL,
  strictLimiter,
  totpConfig,
  saveTotpConfig,
  loadSiteConfig,
  loadNotificationConfig,
  resyncHealthChecker,

  // Middleware result
  middlewareResult,

  // App
  app,
}) {
  // Create domain-specific contexts
  const docker = createDockerContext(dockerSecurity);
  const caddy = createCaddyContext(CADDYFILE_PATH, CADDY_ADMIN_URL, fetchT, httpsAgent, log, siteConfig, buildDomain);
  const dns = createDnsContext(siteConfig, buildDomain, credentialManager, fetchT, httpsAgent, log, DNS_CREDENTIALS_FILE);
  const session = createSessionContext(middlewareResult);

  // Notification context (inline for now - could be extracted)
  const notification = {
    // These will be populated by server.js for now
    // TODO: Extract notification module
  };

  // Tailscale context (inline for now - could be extracted)
  const tailscale = {
    // These will be populated by server.js for now
    // TODO: Extract tailscale module
  };

  // Assemble flat context (temporary - routes still expect this)
  const ctx = {
    // Namespaced contexts
    docker,
    caddy,
    dns,
    session,
    notification,
    tailscale,

    // App and config
    app,
    siteConfig,

    // State managers
    servicesStateManager,
    configStateManager,

    // Managers
    credentialManager,
    authManager,
    licenseManager,
    healthChecker,
    updateManager,
    backupManager,
    resourceMonitor,
    auditLogger,
    portLockManager,
    selfUpdater,
    dockerMaintenance,
    logDigest,

    // Templates
    APP_TEMPLATES,
    TEMPLATE_CATEGORIES,
    DIFFICULTY_LEVELS,
    RECIPE_TEMPLATES,
    RECIPE_CATEGORIES,

    // Helpers
    asyncHandler,
    errorResponse,
    ok,
    fetchT,
    log,
    logError,
    safeErrorMessage,
    buildDomain,
    buildServiceUrl,
    getServiceById,
    readConfig,
    saveConfig,
    addServiceToConfig,
    validateURL,
    strictLimiter,

    // Config helpers
    totpConfig,
    saveTotpConfig,
    loadSiteConfig,
    loadNotificationConfig,
    resyncHealthChecker,

    // File paths
    SERVICES_FILE,
    CONFIG_FILE,
    TOTP_CONFIG_FILE,
    TAILSCALE_CONFIG_FILE,
    NOTIFICATIONS_FILE,
    ERROR_LOG_FILE,
  };

  return ctx;
}
|
||||||
|
|
||||||
|
module.exports = { assembleContext };
|
||||||
21
dashcaddy-api/src/context/session.js
Normal file
21
dashcaddy-api/src/context/session.js
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
/**
|
||||||
|
* Session context - IP-based session management
|
||||||
|
* (Implementation provided by middleware, just re-exported here)
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
 * Create the session context by re-exposing the middleware's IP-session
 * helpers under shorter names. Pure renaming — no behavior of its own.
 */
function createSessionContext(middlewareResult) {
  return {
    ipSessions: middlewareResult.ipSessions,
    durations: middlewareResult.SESSION_DURATIONS,
    getClientIP: middlewareResult.getClientIP,
    create: middlewareResult.createIPSession,
    setCookie: middlewareResult.setSessionCookie,
    clear: middlewareResult.clearIPSession,
    clearCookie: middlewareResult.clearSessionCookie,
    isValid: middlewareResult.isSessionValid,
  };
}
|
||||||
|
|
||||||
|
module.exports = { createSessionContext };
|
||||||
30
dashcaddy-api/src/utils/async-handler.js
Normal file
30
dashcaddy-api/src/utils/async-handler.js
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
/**
|
||||||
|
* Async handler wrapper - Eliminates try/catch boilerplate
|
||||||
|
*/
|
||||||
|
const { AppError } = require('../../errors');
|
||||||
|
|
||||||
|
/**
 * Wrap an async Express handler so rejections are logged and answered
 * instead of crashing the process.
 *
 * Typed AppError instances are forwarded to the global error middleware;
 * anything else is logged (under `context`, or the request path) and
 * answered with a sanitized 500 if no response has been started.
 */
function asyncHandler(logError, fn, context) {
  return async function wrapped(req, res, next) {
    try {
      await fn(req, res, next);
    } catch (error) {
      // Typed application errors have dedicated handling downstream.
      if (error instanceof AppError) return next(error);

      await logError(context || req.path, error);

      if (res.headersSent) return;
      // Lazy requires avoid a circular dependency between the util modules.
      const { errorResponse } = require('./responses');
      const { safeErrorMessage } = require('./logging');
      errorResponse(res, 500, safeErrorMessage(error));
    }
  };
}
|
||||||
|
|
||||||
|
module.exports = { asyncHandler };
|
||||||
80
dashcaddy-api/src/utils/http.js
Normal file
80
dashcaddy-api/src/utils/http.js
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
/**
|
||||||
|
* HTTP utilities - Fetch helpers and HTTP operations
|
||||||
|
*/
|
||||||
|
const http = require('http');
|
||||||
|
const { TIMEOUTS } = require('../../constants');
|
||||||
|
|
||||||
|
/**
 * Fetch with automatic timeout.
 * Drop-in replacement for fetch() with an AbortSignal timeout; requests to
 * the Caddy admin API (port 2019) are routed through a raw http.request
 * wrapper because Caddy rejects Node's undici fetch.
 *
 * FIX: `delete opts.timeout` previously mutated the caller's options object
 * whenever a signal was supplied; the options are now copied first.
 *
 * @param {string} url
 * @param {object} [opts] fetch options; a legacy `timeout` key is ignored
 * @param {number} [timeoutMs] timeout applied when no signal is given
 * @returns {Promise<Response>}
 */
function fetchT(url, opts = {}, timeoutMs = TIMEOUTS.HTTP_DEFAULT) {
  // Caddy admin API rejects Node.js undici fetch - use raw http.request
  if (url.includes(':2019')) {
    return _httpFetch(url, opts, timeoutMs);
  }

  // Copy before stripping the non-standard `timeout` key so the caller's
  // object is never mutated.
  const { timeout: _timeout, ...fetchOpts } = opts;
  if (!fetchOpts.signal) {
    fetchOpts.signal = AbortSignal.timeout(timeoutMs);
  }
  return fetch(url, fetchOpts);
}
|
||||||
|
|
||||||
|
/**
 * Raw http.request wrapper for the Caddy admin API.
 * Returns a minimal fetch-Response-like object: { ok, status, statusText,
 * json(), text(), headers.get() }. Responses are buffered in memory and
 * capped at 10MB; requests reject on socket timeout or error.
 */
function _httpFetch(url, opts = {}, timeoutMs = TIMEOUTS.HTTP_DEFAULT) {
  return new Promise((resolve, reject) => {
    const parsed = new URL(url);
    const options = {
      hostname: parsed.hostname,
      port: parsed.port || 2019,  // Caddy admin API default port
      path: parsed.pathname + parsed.search,
      method: (opts.method || 'GET').toUpperCase(),
      headers: { ...opts.headers },
      timeout: timeoutMs,
    };

    if (opts.body) {
      // Explicit length so the request is not sent chunked.
      options.headers['Content-Length'] = Buffer.byteLength(opts.body);
    }

    const MAX_RESPONSE_SIZE = 10 * 1024 * 1024; // 10MB
    const req = http.request(options, (res) => {
      let data = '';
      let size = 0;

      res.on('data', chunk => {
        size += chunk.length;
        // Abort oversized responses instead of buffering unbounded data.
        if (size > MAX_RESPONSE_SIZE) {
          res.destroy();
          reject(new Error(`Response from ${url} exceeded ${MAX_RESPONSE_SIZE} bytes`));
          return;
        }
        data += chunk;
      });

      res.on('end', () => {
        resolve({
          ok: res.statusCode >= 200 && res.statusCode < 300,
          status: res.statusCode,
          statusText: res.statusMessage,
          // NOTE(review): JSON.parse runs synchronously inside json(); on bad
          // JSON a direct `.then()` caller would see a sync throw — confirm
          // all callers use await (where it becomes a rejection).
          json: () => Promise.resolve(JSON.parse(data)),
          text: () => Promise.resolve(data),
          headers: { get: (k) => res.headers[k.toLowerCase()] },
        });
      });
    });

    req.on('timeout', () => {
      req.destroy();
      reject(new Error(`Request to ${url} timed out after ${timeoutMs}ms`));
    });
    req.on('error', reject);
    if (opts.body) req.write(opts.body);
    req.end();
  });
}
|
||||||
|
|
||||||
|
module.exports = { fetchT };
|
||||||
25
dashcaddy-api/src/utils/index.js
Normal file
25
dashcaddy-api/src/utils/index.js
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
/**
|
||||||
|
* Utilities index - Re-export all utility modules
|
||||||
|
*/
|
||||||
|
const { fetchT } = require('./http');
|
||||||
|
const { LOG_LEVELS, createLogger, logError, safeErrorMessage } = require('./logging');
|
||||||
|
const { errorResponse, ok } = require('./responses');
|
||||||
|
const { asyncHandler } = require('./async-handler');
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
// HTTP
|
||||||
|
fetchT,
|
||||||
|
|
||||||
|
// Logging
|
||||||
|
LOG_LEVELS,
|
||||||
|
createLogger,
|
||||||
|
logError,
|
||||||
|
safeErrorMessage,
|
||||||
|
|
||||||
|
// Responses
|
||||||
|
errorResponse,
|
||||||
|
ok,
|
||||||
|
|
||||||
|
// Async handling
|
||||||
|
asyncHandler,
|
||||||
|
};
|
||||||
119
dashcaddy-api/src/utils/logging.js
Normal file
119
dashcaddy-api/src/utils/logging.js
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
/**
|
||||||
|
* Logging utilities - Structured logging and error handling
|
||||||
|
*/
|
||||||
|
const fsp = require('fs').promises;
|
||||||
|
const path = require('path');
|
||||||
|
|
||||||
|
const LOG_LEVELS = { debug: 0, info: 1, warn: 2, error: 3 };
|
||||||
|
|
||||||
|
/**
 * Create a JSON-lines structured logger.
 * Entries below LOG_LEVEL are dropped; errors go to console.error, warnings
 * to console.warn, everything else to console.info. The returned function
 * is callable directly and also carries .debug/.info/.warn/.error helpers.
 */
function createLogger(LOG_LEVEL) {
  function log(level, context, message, data = {}) {
    if (LOG_LEVELS[level] < LOG_LEVEL) return;

    const entry = {
      t: new Date().toISOString(),
      level,
      ctx: context,
      msg: message,
    };
    if (Object.keys(data).length) entry.data = data;

    const line = JSON.stringify(entry);
    if (level === 'error') {
      console.error(line);
    } else if (level === 'warn') {
      console.warn(line);
    } else {
      console.info(line);
    }
  }

  for (const level of ['info', 'warn', 'error', 'debug']) {
    log[level] = (ctx, msg, data) => log(level, ctx, msg, data);
  }

  return log;
}
|
||||||
|
|
||||||
|
/**
 * Enhanced error logging with request-context tracking.
 * Appends a human-readable entry to ERROR_LOG_FILE, rotating the file to
 * `<file>.1` once it exceeds MAX_ERROR_LOG_SIZE. Never throws: write
 * failures are reported through `log` when available.
 *
 * FIXES:
 *  - `delete additionalInfo.req` mutated the caller's object; extraction now
 *    works on a shallow copy.
 *  - Removed the `logEntry` object that was constructed but never used.
 *
 * @param {string} ERROR_LOG_FILE path of the error log
 * @param {number} MAX_ERROR_LOG_SIZE rotation threshold in bytes
 * @param {string} context short label for where the error occurred
 * @param {Error|string} error the error being recorded
 * @param {object} [additionalInfo] extra fields; `req` (an Express request)
 *        is summarized into a request-context section and not serialized raw
 * @param {object} [log] structured logger used to report write failures
 */
async function logError(ERROR_LOG_FILE, MAX_ERROR_LOG_SIZE, context, error, additionalInfo = {}, log) {
  const timestamp = new Date().toISOString();

  // Work on a shallow copy so the caller's object is never mutated.
  const info = { ...additionalInfo };

  // Extract request context
  const requestContext = {};
  if (info.req) {
    const req = info.req;
    const clientIP = req.ip || req.socket?.remoteAddress || '';
    requestContext.requestId = req.id;
    requestContext.ip = clientIP;
    requestContext.userAgent = req.get('user-agent');
    requestContext.method = req.method;
    requestContext.path = req.path;
    delete info.req; // the raw request object is not serializable
  }

  const contextInfo = Object.keys(requestContext).length > 0
    ? `\nRequest Context: ${JSON.stringify(requestContext, null, 2)}`
    : '';
  const logLine = `[${timestamp}] ${context}: ${error.message || error}\n${error.stack || ''}${contextInfo}\nAdditional Info: ${JSON.stringify(info, null, 2)}\n${'='.repeat(80)}\n`;

  try {
    // Rotate log if it exceeds max size
    try {
      const stats = await fsp.stat(ERROR_LOG_FILE);
      if (stats.size > MAX_ERROR_LOG_SIZE) {
        const rotated = ERROR_LOG_FILE + '.1';
        const exists = await fsp.access(rotated).then(() => true).catch(() => false);
        if (exists) await fsp.unlink(rotated);
        await fsp.rename(ERROR_LOG_FILE, rotated);
      }
    } catch (_) { /* file may not exist yet */ }

    await fsp.appendFile(ERROR_LOG_FILE, logLine);
  } catch (e) {
    if (log && log.error) {
      log.error('errorlog', 'Failed to write to error log', { error: e.message });
    }
  }
}
|
||||||
|
|
||||||
|
/**
 * Map an internal error to a message safe to show users.
 * Docker port conflicts get a dedicated DC-200 message; short, path-free
 * messages pass through unchanged; everything else (long messages, paths,
 * stack fragments) collapses to a generic message.
 */
function safeErrorMessage(error) {
  const msg = error.message || String(error);

  // Docker port-conflict errors come in several phrasings.
  const portMatch = msg.match(/exposing port TCP [^:]*:(\d+)/);
  const isPortConflict =
    portMatch !== null ||
    msg.includes('port is already allocated') ||
    msg.includes('ports are not available');
  if (isPortConflict) {
    const port = portMatch ? portMatch[1] : 'requested';
    return `[DC-200] Port ${port} is already in use. Try a different port or stop the service using that port first.`;
  }

  // Long messages, filesystem paths, and stack-trace fragments stay internal.
  const looksInternal =
    msg.length >= 200 || msg.includes('/') || msg.includes('\\') || msg.includes(' at ');
  return looksInternal ? 'An internal error occurred' : msg;
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
LOG_LEVELS,
|
||||||
|
createLogger,
|
||||||
|
logError,
|
||||||
|
safeErrorMessage,
|
||||||
|
};
|
||||||
22
dashcaddy-api/src/utils/responses.js
Normal file
22
dashcaddy-api/src/utils/responses.js
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
/**
|
||||||
|
* Response helpers - Standard API response formats
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
 * Send the standard JSON error payload: { success: false, error, ...extras }.
 * @returns the Express response (for chaining)
 */
function errorResponse(res, statusCode, message, extras = {}) {
  const payload = { success: false, error: message, ...extras };
  return res.status(statusCode).json(payload);
}
|
||||||
|
|
||||||
|
/**
 * Send the standard JSON success payload: { success: true, ...data }.
 * @returns the Express response (for chaining)
 */
function ok(res, data = {}) {
  const payload = { success: true, ...data };
  return res.json(payload);
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
errorResponse,
|
||||||
|
ok,
|
||||||
|
};
|
||||||
13
fix-ctx-routes.sh
Normal file
13
fix-ctx-routes.sh
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
#!/bin/bash
# Systematically fix ctx.* references in all route files
#
# Scans every route module (excluding index.js and helpers.js) with ESLint,
# counts "'ctx' is not defined" violations per file, and prints the files
# with at least one violation sorted by error count (highest first).
# Read-only: this script only reports; it does not modify any files.

# NOTE(review): hardcoded workspace path — adjust if run outside this agent env.
cd /root/.openclaw/agents/main/workspace/dashcaddy-work/dashcaddy-api

# Find all route files with ctx errors
echo "Finding routes with ctx errors..."
for file in $(find routes -name "*.js" -type f | grep -v index.js | grep -v helpers.js); do
  # grep -c counts matching lines; eslint stderr is folded in so tool errors
  # don't abort the loop.
  errors=$(npx eslint "$file" 2>&1 | grep -c "'ctx' is not defined")
  if [ "$errors" -gt 0 ]; then
    echo "$errors errors in $file"
  fi
done | sort -rn
|
||||||
Reference in New Issue
Block a user