Phase 1: Add ESLint/Prettier config + baseline auto-fixes
dashcaddy-api/.eslintignore (Normal file, 4 lines added)
@@ -0,0 +1,4 @@
+node_modules/
+coverage/
+dist/
+*.min.js
dashcaddy-api/.eslintrc.js (Normal file, 56 lines added)
@@ -0,0 +1,56 @@
+module.exports = {
+  env: {
+    node: true,
+    es2021: true,
+    jest: true,
+  },
+  extends: 'eslint:recommended',
+  parserOptions: {
+    ecmaVersion: 2021,
+  },
+  rules: {
+    // Possible errors
+    'no-await-in-loop': 'warn',
+    'no-console': 'off', // We use console in server code
+    'no-template-curly-in-string': 'error',
+
+    // Best practices
+    'curly': ['error', 'multi-line'],
+    'eqeqeq': ['error', 'always', { null: 'ignore' }],
+    'no-eval': 'error',
+    'no-implied-eval': 'error',
+    'no-return-await': 'error',
+    'no-throw-literal': 'error',
+    'prefer-promise-reject-errors': 'error',
+    'require-await': 'warn',
+
+    // Variables
+    'no-unused-vars': ['error', {
+      argsIgnorePattern: '^_',
+      varsIgnorePattern: '^_',
+    }],
+    'no-use-before-define': ['error', {
+      functions: false,
+      classes: true,
+    }],
+
+    // Stylistic
+    'comma-dangle': ['error', 'always-multiline'],
+    'quotes': ['error', 'single', { avoidEscape: true }],
+    'semi': ['error', 'always'],
+    'indent': ['error', 2, { SwitchCase: 1 }],
+    'max-len': ['warn', {
+      code: 120,
+      ignoreUrls: true,
+      ignoreStrings: true,
+      ignoreTemplateLiterals: true,
+    }],
+
+    // ES6
+    'arrow-spacing': 'error',
+    'no-var': 'error',
+    'prefer-const': 'error',
+    'prefer-arrow-callback': 'warn',
+    'prefer-template': 'warn',
+  },
+};
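As an illustration (not part of this commit): with 'comma-dangle': ['error', 'always-multiline'] and the ES6 rules above, a single `npx eslint . --fix` pass produces most of the mechanical changes in the rest of this diff. A minimal before/after sketch, assuming ESLint 8.x with this exact config; the function and data below are hypothetical:

// Before: flagged by no-var/prefer-const, prefer-template, eqeqeq, comma-dangle.
function labelServiceBefore(name, items) {
  var label = 'Service: ' + name;
  if (items.length == 0) {
    items.push({
      id: 'placeholder',
      label: label
    });
  }
  return items;
}

// After `eslint . --fix` (eqeqeq may need a manual touch, its fixer is conservative):
function labelServiceAfter(name, items) {
  const label = `Service: ${name}`;
  if (items.length === 0) {
    items.push({
      id: 'placeholder',
      label: label,
    });
  }
  return items;
}

console.log(labelServiceAfter('Plex', [])); // [ { id: 'placeholder', label: 'Service: Plex' } ]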
dashcaddy-api/.prettierrc (Normal file, 8 lines added)
@@ -0,0 +1,8 @@
+{
+  "semi": true,
+  "singleQuote": true,
+  "trailingComma": "es5",
+  "tabWidth": 2,
+  "printWidth": 120,
+  "arrowParens": "always"
+}
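The Prettier settings line up with the ESLint stylistic rules above (semicolons, single quotes, 2-space indentation, 120-character width). A hypothetical package.json wiring for running both tools; the script names are illustrative and not part of this commit:

{
  "scripts": {
    "lint": "eslint .",
    "lint:fix": "eslint . --fix",
    "format": "prettier --write \"**/*.{js,json}\""
  }
}

The remainder of this diff is the baseline --fix pass over the existing sources and tests: mostly trailing-comma insertions, string concatenation rewritten to template literals, and let-to-const conversions.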
@@ -77,7 +77,7 @@ describe('API Endpoints', () => {
 name: 'Test Service',
 logo: '/assets/test.png',
 ip: 'localhost',
-tailscaleOnly: false
+tailscaleOnly: false,
 });

 // Now get services
@@ -87,7 +87,7 @@ describe('API Endpoints', () => {
|
||||
expect(res.body.length).toBe(1);
|
||||
expect(res.body[0]).toMatchObject({
|
||||
id: 'test-service',
|
||||
name: 'Test Service'
|
||||
name: 'Test Service',
|
||||
});
|
||||
});
|
||||
|
||||
@@ -113,7 +113,7 @@ describe('API Endpoints', () => {
|
||||
name: 'Plex',
|
||||
logo: '/assets/plex.png',
|
||||
ip: 'localhost',
|
||||
tailscaleOnly: false
|
||||
tailscaleOnly: false,
|
||||
};
|
||||
|
||||
const res = await request(app)
|
||||
@@ -134,7 +134,7 @@ describe('API Endpoints', () => {
|
||||
test('should reject duplicate service IDs', async () => {
|
||||
const service = {
|
||||
id: 'duplicate',
|
||||
name: 'Duplicate Service'
|
||||
name: 'Duplicate Service',
|
||||
};
|
||||
|
||||
// Add first time
|
||||
@@ -153,7 +153,7 @@ describe('API Endpoints', () => {
|
||||
.post('/api/services')
|
||||
.send({
|
||||
// Missing 'id' and 'name'
|
||||
logo: '/assets/test.png'
|
||||
logo: '/assets/test.png',
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
@@ -164,7 +164,7 @@ describe('API Endpoints', () => {
|
||||
const maliciousService = {
|
||||
id: 'test<script>alert(1)</script>',
|
||||
name: '<img src=x onerror=alert(1)>',
|
||||
logo: '/assets/test.png'
|
||||
logo: '/assets/test.png',
|
||||
};
|
||||
|
||||
const res = await request(app)
|
||||
@@ -192,8 +192,8 @@ describe('API Endpoints', () => {
|
||||
promises.push(
|
||||
request(app).post('/api/services').send({
|
||||
id: `service-${i}`,
|
||||
name: `Service ${i}`
|
||||
})
|
||||
name: `Service ${i}`,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -215,11 +215,11 @@ describe('API Endpoints', () => {
|
||||
// Add test services
|
||||
await request(app).post('/api/services').send({
|
||||
id: 'service1',
|
||||
name: 'Service 1'
|
||||
name: 'Service 1',
|
||||
});
|
||||
await request(app).post('/api/services').send({
|
||||
id: 'service2',
|
||||
name: 'Service 2'
|
||||
name: 'Service 2',
|
||||
});
|
||||
});
|
||||
|
||||
@@ -246,7 +246,7 @@ describe('API Endpoints', () => {
|
||||
// Try to delete the same service twice simultaneously
|
||||
const promises = [
|
||||
request(app).delete('/api/services/service1'),
|
||||
request(app).delete('/api/services/service1')
|
||||
request(app).delete('/api/services/service1'),
|
||||
];
|
||||
|
||||
const results = await Promise.all(promises);
|
||||
@@ -263,7 +263,7 @@ describe('API Endpoints', () => {
|
||||
const services = [
|
||||
{ id: 'plex', name: 'Plex' },
|
||||
{ id: 'jellyfin', name: 'Jellyfin' },
|
||||
{ id: 'emby', name: 'Emby' }
|
||||
{ id: 'emby', name: 'Emby' },
|
||||
];
|
||||
|
||||
const res = await request(app)
|
||||
@@ -282,13 +282,13 @@ describe('API Endpoints', () => {
|
||||
// Add initial service
|
||||
await request(app).post('/api/services').send({
|
||||
id: 'old',
|
||||
name: 'Old Service'
|
||||
name: 'Old Service',
|
||||
});
|
||||
|
||||
// Import new services (should replace)
|
||||
const newServices = [
|
||||
{ id: 'new1', name: 'New Service 1' },
|
||||
{ id: 'new2', name: 'New Service 2' }
|
||||
{ id: 'new2', name: 'New Service 2' },
|
||||
];
|
||||
|
||||
await request(app).put('/api/services').send(newServices);
|
||||
@@ -360,7 +360,7 @@ describe('API Endpoints', () => {
|
||||
test('should save config', async () => {
|
||||
const config = {
|
||||
theme: 'dark',
|
||||
domain: 'test.local'
|
||||
domain: 'test.local',
|
||||
};
|
||||
|
||||
const res = await request(app)
|
||||
|
||||
@@ -12,7 +12,7 @@ const credentialManager = require('../credential-manager');
|
||||
// Mock credential manager
|
||||
jest.mock('../credential-manager');
|
||||
jest.mock('../logger-utils', () => ({
|
||||
safeLog: jest.fn()
|
||||
safeLog: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('AuthManager', () => {
|
||||
@@ -166,8 +166,8 @@ describe('AuthManager', () => {
|
||||
expect(credentialManager.save).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/^auth\.apikey\./),
|
||||
expect.objectContaining({
|
||||
keySecret: expect.any(String)
|
||||
})
|
||||
keySecret: expect.any(String),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
@@ -179,8 +179,8 @@ describe('AuthManager', () => {
|
||||
expect.objectContaining({
|
||||
name: 'test-key',
|
||||
scopes: ['read'],
|
||||
createdAt: expect.any(String)
|
||||
})
|
||||
createdAt: expect.any(String),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
@@ -210,12 +210,12 @@ describe('AuthManager', () => {
|
||||
|
||||
// Mock credential manager to return the stored key
|
||||
credentialManager.get.mockResolvedValueOnce({
|
||||
keySecret: key.split('_')[2]
|
||||
keySecret: key.split('_')[2],
|
||||
});
|
||||
credentialManager.get.mockResolvedValueOnce({
|
||||
name: 'test-key',
|
||||
scopes: ['read', 'write'],
|
||||
createdAt: new Date().toISOString()
|
||||
createdAt: new Date().toISOString(),
|
||||
});
|
||||
|
||||
const validated = await authManager.validateAPIKey(key);
|
||||
@@ -239,7 +239,7 @@ describe('AuthManager', () => {
|
||||
});
|
||||
|
||||
test('should reject non-existent API key', async () => {
|
||||
const fakeKey = 'dk_' + crypto.randomBytes(16).toString('hex') + '_' + crypto.randomBytes(32).toString('hex');
|
||||
const fakeKey = `dk_${ crypto.randomBytes(16).toString('hex') }_${ crypto.randomBytes(32).toString('hex')}`;
|
||||
credentialManager.get.mockResolvedValue(null); // Key doesn't exist
|
||||
|
||||
const validated = await authManager.validateAPIKey(fakeKey);
|
||||
@@ -252,7 +252,7 @@ describe('AuthManager', () => {
|
||||
|
||||
credentialManager.get.mockResolvedValueOnce({
|
||||
keySecret: key.split('_')[2],
|
||||
revoked: true // Key is revoked
|
||||
revoked: true, // Key is revoked
|
||||
});
|
||||
|
||||
const validated = await authManager.validateAPIKey(key);
|
||||
@@ -278,7 +278,7 @@ describe('AuthManager', () => {
|
||||
const { id } = await authManager.generateAPIKey('test-key');
|
||||
|
||||
credentialManager.get.mockResolvedValue({
|
||||
keySecret: 'test-secret'
|
||||
keySecret: 'test-secret',
|
||||
});
|
||||
|
||||
const revoked = await authManager.revokeAPIKey(id);
|
||||
@@ -288,8 +288,8 @@ describe('AuthManager', () => {
|
||||
`auth.apikey.${id}`,
|
||||
expect.objectContaining({
|
||||
revoked: true,
|
||||
revokedAt: expect.any(String)
|
||||
})
|
||||
revokedAt: expect.any(String),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
@@ -305,19 +305,19 @@ describe('AuthManager', () => {
|
||||
test('should list all API keys with metadata', async () => {
|
||||
credentialManager.list.mockResolvedValue([
|
||||
'auth.metadata.key1',
|
||||
'auth.metadata.key2'
|
||||
'auth.metadata.key2',
|
||||
]);
|
||||
|
||||
credentialManager.get.mockResolvedValueOnce({
|
||||
name: 'Key 1',
|
||||
scopes: ['read'],
|
||||
createdAt: '2026-01-01T00:00:00Z'
|
||||
createdAt: '2026-01-01T00:00:00Z',
|
||||
});
|
||||
|
||||
credentialManager.get.mockResolvedValueOnce({
|
||||
name: 'Key 2',
|
||||
scopes: ['read', 'write'],
|
||||
createdAt: '2026-01-02T00:00:00Z'
|
||||
createdAt: '2026-01-02T00:00:00Z',
|
||||
});
|
||||
|
||||
const keys = await authManager.listAPIKeys();
|
||||
|
||||
@@ -198,7 +198,7 @@ describe('cleanupOldBackups', () => {
|
||||
name: 'daily',
|
||||
status: 'success',
|
||||
timestamp: new Date(Date.now() - i * 86400000).toISOString(),
|
||||
locations: [{ type: 'local', path: `/tmp/fake-${i}.backup` }]
|
||||
locations: [{ type: 'local', path: `/tmp/fake-${i}.backup` }],
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -47,7 +47,7 @@ describe('Config Routes', () => {
|
||||
const validConfig = {
|
||||
tld: 'sami',
|
||||
theme: 'dark',
|
||||
timezone: 'America/New_York'
|
||||
timezone: 'America/New_York',
|
||||
};
|
||||
|
||||
const res = await request(app)
|
||||
@@ -76,7 +76,7 @@ describe('Config Routes', () => {
|
||||
test('should return 400 for config with invalid field values', async () => {
|
||||
const invalidConfig = {
|
||||
tld: 123, // tld must be a string
|
||||
dns: 'not-an-object' // dns must be an object
|
||||
dns: 'not-an-object', // dns must be an object
|
||||
};
|
||||
|
||||
const res = await request(app)
|
||||
|
||||
@@ -68,7 +68,7 @@ describe('store', () => {
|
||||
'key-with-dashes',
|
||||
'key_with_underscores',
|
||||
'key:with:colons',
|
||||
'key/with/slashes'
|
||||
'key/with/slashes',
|
||||
];
|
||||
|
||||
for (const key of specialKeys) {
|
||||
@@ -83,8 +83,8 @@ describe('store', () => {
 'password!@#$%^&*()',
 'token\nwith\nnewlines',
 'json{"key":"value"}',
-'unicode=ƒöÉ=ƒöæG£à',
-'quotes"and\'apostrophes'
+'unicode=<EFBFBD><EFBFBD><EFBFBD>=<3D><><EFBFBD>G<EFBFBD><47>',
+'quotes"and\'apostrophes',
 ];

 for (let i = 0; i < specialValues.length; i++) {
@@ -210,7 +210,7 @@ describe('getMetadata', () => {
|
||||
description: 'API Key',
|
||||
service: 'GitHub',
|
||||
expiresAt: '2026-12-31',
|
||||
createdBy: 'admin'
|
||||
createdBy: 'admin',
|
||||
};
|
||||
|
||||
await credentialManager.store('meta.complex', 'value', metadata);
|
||||
@@ -328,7 +328,7 @@ describe('Concurrent Access', () => {
|
||||
const promises = [
|
||||
credentialManager.store('concurrent.key', 'value1'),
|
||||
credentialManager.store('concurrent.key', 'value2'),
|
||||
credentialManager.store('concurrent.key', 'value3')
|
||||
credentialManager.store('concurrent.key', 'value3'),
|
||||
];
|
||||
|
||||
await Promise.all(promises);
|
||||
@@ -359,7 +359,7 @@ describe('Concurrent Access', () => {
|
||||
const promises = [
|
||||
credentialManager.retrieve('readwrite.key'),
|
||||
credentialManager.store('readwrite.key', 'updated'),
|
||||
credentialManager.retrieve('readwrite.key')
|
||||
credentialManager.retrieve('readwrite.key'),
|
||||
];
|
||||
|
||||
const results = await Promise.all(promises);
|
||||
@@ -496,7 +496,7 @@ describe('Credential Manager - Extended Coverage', () => {
|
||||
const promises = [
|
||||
credentialManager.delete('delete.concurrent'),
|
||||
credentialManager.delete('delete.concurrent'),
|
||||
credentialManager.delete('delete.concurrent')
|
||||
credentialManager.delete('delete.concurrent'),
|
||||
];
|
||||
|
||||
// Should not throw
|
||||
@@ -532,7 +532,7 @@ describe('Credential Manager - Extended Coverage', () => {
 });

 test('should handle unicode characters', async () => {
-const unicode = 'S+ásÑ+S+ûtòî =ƒÜÇ +à+¦+¡+¿+º +º+ä+¦+º+ä+à';
+const unicode = 'S+<EFBFBD>s<EFBFBD>+S+<2B>t<EFBFBD><74> =<3D><><EFBFBD> +<2B>+<2B>+<2B>+<2B>+<2B> +<2B>+<2B>+<2B>+<2B>+<2B>+<2B>';

 const stored = await credentialManager.store('unicode.key', unicode);
 expect(stored).toBe(true);
@@ -621,7 +621,7 @@ describe('Credential Manager - Extended Coverage', () => {
|
||||
description: 'Production database password',
|
||||
createdAt: new Date().toISOString(),
|
||||
owner: 'admin',
|
||||
tags: ['production', 'database']
|
||||
tags: ['production', 'database'],
|
||||
};
|
||||
|
||||
await credentialManager.store('meta.full', 'value', metadata);
|
||||
@@ -648,7 +648,7 @@ describe('Credential Manager - Extended Coverage', () => {
|
||||
test('should handle metadata with special characters', async () => {
|
||||
const metadata = {
|
||||
description: 'Test with "quotes" and \'apostrophes\'',
|
||||
notes: 'Line 1\nLine 2\tTabbed'
|
||||
notes: 'Line 1\nLine 2\tTabbed',
|
||||
};
|
||||
|
||||
await credentialManager.store('meta.special', 'value', metadata);
|
||||
|
||||
@@ -43,14 +43,14 @@ describe('encrypt / decrypt', () => {
|
||||
test('throws on tampered ciphertext', () => {
|
||||
const encrypted = cryptoUtils.encrypt('test');
|
||||
const parts = encrypted.split(':');
|
||||
parts[2] = 'AAAA' + parts[2].slice(4); // tamper with ciphertext
|
||||
parts[2] = `AAAA${ parts[2].slice(4)}`; // tamper with ciphertext
|
||||
expect(() => cryptoUtils.decrypt(parts.join(':'))).toThrow();
|
||||
});
|
||||
|
||||
test('throws on tampered authTag', () => {
|
||||
const encrypted = cryptoUtils.encrypt('test');
|
||||
const parts = encrypted.split(':');
|
||||
parts[1] = 'AAAA' + parts[1].slice(4); // tamper with auth tag
|
||||
parts[1] = `AAAA${ parts[1].slice(4)}`; // tamper with auth tag
|
||||
expect(() => cryptoUtils.decrypt(parts.join(':'))).toThrow();
|
||||
});
|
||||
|
||||
|
||||
@@ -151,7 +151,7 @@ describe('DockerSecurity Module', () => {
|
||||
});
|
||||
|
||||
test('should handle very long image names', () => {
|
||||
const longName = 'registry.example.com/team/project/' + 'a'.repeat(100) + ':v1.2.3';
|
||||
const longName = `registry.example.com/team/project/${ 'a'.repeat(100) }:v1.2.3`;
|
||||
|
||||
dockerSecurity.setTrustedDigest(longName, 'sha256:long');
|
||||
expect(dockerSecurity.config.trustedDigests[longName]).toBe('sha256:long');
|
||||
|
||||
@@ -202,7 +202,7 @@ describe('Edge Case Tests', () => {
|
||||
.send({
|
||||
id: 'path-traversal',
|
||||
name: 'Path Traversal',
|
||||
logo: '../../../../../../etc/passwd'
|
||||
logo: '../../../../../../etc/passwd',
|
||||
});
|
||||
|
||||
// Should handle safely
|
||||
@@ -255,7 +255,7 @@ describe('Edge Case Tests', () => {
|
||||
test('should handle bulk import of 200 services', async () => {
|
||||
const bulkServices = Array.from({ length: 200 }, (_, i) => ({
|
||||
id: `bulk-${i}`,
|
||||
name: `Bulk Service ${i}`
|
||||
name: `Bulk Service ${i}`,
|
||||
}));
|
||||
|
||||
const res = await request(app)
|
||||
@@ -277,7 +277,7 @@ describe('Edge Case Tests', () => {
|
||||
.send({
|
||||
id: 'large-data',
|
||||
name: 'Large Data',
|
||||
description: largeData
|
||||
description: largeData,
|
||||
});
|
||||
|
||||
// Might reject due to size
|
||||
@@ -290,7 +290,7 @@ describe('Edge Case Tests', () => {
|
||||
const promises = Array.from({ length: 20 }, (_, i) =>
|
||||
request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: `concurrent-${i}`, name: `Concurrent ${i}` })
|
||||
.send({ id: `concurrent-${i}`, name: `Concurrent ${i}` }),
|
||||
);
|
||||
|
||||
const results = await Promise.all(promises);
|
||||
@@ -317,7 +317,7 @@ describe('Edge Case Tests', () => {
|
||||
// Simultaneously add again and delete
|
||||
const [addRes, deleteRes] = await Promise.all([
|
||||
request(app).post('/api/services').send({ id: 'race', name: 'Race 2' }),
|
||||
request(app).delete('/api/services/race')
|
||||
request(app).delete('/api/services/race'),
|
||||
]);
|
||||
|
||||
// One should succeed, states should be consistent
|
||||
@@ -331,7 +331,7 @@ describe('Edge Case Tests', () => {
|
||||
|
||||
const [res1, res2] = await Promise.all([
|
||||
request(app).put('/api/services').send(set1),
|
||||
request(app).put('/api/services').send(set2)
|
||||
request(app).put('/api/services').send(set2),
|
||||
]);
|
||||
|
||||
// Both operations should complete
|
||||
@@ -463,7 +463,7 @@ describe('Edge Case Tests', () => {
|
||||
|
||||
test('should handle double-encoded JSON', async () => {
|
||||
const doubleEncoded = JSON.stringify(
|
||||
JSON.stringify({ id: 'double', name: 'Double Encoded' })
|
||||
JSON.stringify({ id: 'double', name: 'Double Encoded' }),
|
||||
);
|
||||
|
||||
const res = await request(app)
|
||||
@@ -525,7 +525,7 @@ describe('Edge Case Tests', () => {
|
||||
|
||||
test('should handle configuration with nested arrays', async () => {
|
||||
const config = {
|
||||
nested: [[['deep', 'array'], ['values']], [['more']]]
|
||||
nested: [[['deep', 'array'], ['values']], [['more']]],
|
||||
};
|
||||
|
||||
const res = await request(app)
|
||||
@@ -558,7 +558,7 @@ describe('Edge Case Tests', () => {
|
||||
// Delete twice at once
|
||||
const [res1, res2] = await Promise.all([
|
||||
request(app).delete('/api/services/delete-me'),
|
||||
request(app).delete('/api/services/delete-me')
|
||||
request(app).delete('/api/services/delete-me'),
|
||||
]);
|
||||
|
||||
// One should succeed (200), one should fail (404)
|
||||
|
||||
@@ -37,25 +37,25 @@ describe('evaluateHealth', () => {
|
||||
|
||||
test('returns false when expectedBodyPattern regex does not match', () => {
|
||||
expect(healthChecker.evaluateHealth(200, 'error occurred', {
|
||||
expectedBodyPattern: 'ok|healthy'
|
||||
expectedBodyPattern: 'ok|healthy',
|
||||
})).toBe(false);
|
||||
});
|
||||
|
||||
test('returns true when expectedBodyPattern regex matches', () => {
|
||||
expect(healthChecker.evaluateHealth(200, 'status: healthy', {
|
||||
expectedBodyPattern: 'healthy'
|
||||
expectedBodyPattern: 'healthy',
|
||||
})).toBe(true);
|
||||
});
|
||||
|
||||
test('returns false when expectedBodyContains text is missing', () => {
|
||||
expect(healthChecker.evaluateHealth(200, 'some response', {
|
||||
expectedBodyContains: 'healthy'
|
||||
expectedBodyContains: 'healthy',
|
||||
})).toBe(false);
|
||||
});
|
||||
|
||||
test('returns true when expectedBodyContains text is present', () => {
|
||||
expect(healthChecker.evaluateHealth(200, 'service is healthy', {
|
||||
expectedBodyContains: 'healthy'
|
||||
expectedBodyContains: 'healthy',
|
||||
})).toBe(true);
|
||||
});
|
||||
|
||||
@@ -64,21 +64,21 @@ describe('evaluateHealth', () => {
|
||||
expect(healthChecker.evaluateHealth(200, 'healthy ok', {
|
||||
expectedStatusCodes: [200],
|
||||
expectedBodyPattern: 'healthy',
|
||||
expectedBodyContains: 'ok'
|
||||
expectedBodyContains: 'ok',
|
||||
})).toBe(true);
|
||||
|
||||
// Status fails
|
||||
expect(healthChecker.evaluateHealth(500, 'healthy ok', {
|
||||
expectedStatusCodes: [200],
|
||||
expectedBodyPattern: 'healthy',
|
||||
expectedBodyContains: 'ok'
|
||||
expectedBodyContains: 'ok',
|
||||
})).toBe(false);
|
||||
|
||||
// Body pattern fails
|
||||
expect(healthChecker.evaluateHealth(200, 'error', {
|
||||
expectedStatusCodes: [200],
|
||||
expectedBodyPattern: 'healthy',
|
||||
expectedBodyContains: 'error'
|
||||
expectedBodyContains: 'error',
|
||||
})).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -9,7 +9,7 @@ const {
|
||||
validateServiceConfig,
|
||||
sanitizeString,
|
||||
isValidPort,
|
||||
isPrivateIP
|
||||
isPrivateIP,
|
||||
} = require('../input-validator');
|
||||
|
||||
// Helper: extract .errors from ValidationError
|
||||
|
||||
@@ -61,7 +61,7 @@ describe('Integration Tests', () => {
|
||||
id: 'test-app',
|
||||
name: 'Test Application',
|
||||
logo: '/assets/test.png',
|
||||
url: 'https://test.test.local'
|
||||
url: 'https://test.test.local',
|
||||
};
|
||||
|
||||
const addRes = await request(app)
|
||||
@@ -81,7 +81,7 @@ describe('Integration Tests', () => {
|
||||
const updatedServices = [{
|
||||
...newService,
|
||||
status: 'online',
|
||||
responseTime: 150
|
||||
responseTime: 150,
|
||||
}];
|
||||
|
||||
const updateRes = await request(app)
|
||||
@@ -116,7 +116,7 @@ describe('Integration Tests', () => {
|
||||
name: template.name,
|
||||
logo: template.logo,
|
||||
port: 8096,
|
||||
subdomain: 'jellyfin'
|
||||
subdomain: 'jellyfin',
|
||||
};
|
||||
|
||||
// Step 3: Add configured service
|
||||
@@ -129,7 +129,7 @@ describe('Integration Tests', () => {
|
||||
// Step 4: Verify service is listed
|
||||
const servicesRes = await request(app).get('/api/services');
|
||||
expect(servicesRes.body).toContainEqual(
|
||||
expect.objectContaining({ id: 'jellyfin' })
|
||||
expect.objectContaining({ id: 'jellyfin' }),
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -140,11 +140,11 @@ describe('Integration Tests', () => {
|
||||
const services = Array.from({ length: 5 }, (_, i) => ({
|
||||
id: `concurrent-${i}`,
|
||||
name: `Concurrent Service ${i}`,
|
||||
logo: `/assets/service-${i}.png`
|
||||
logo: `/assets/service-${i}.png`,
|
||||
}));
|
||||
|
||||
const deployPromises = services.map(service =>
|
||||
request(app).post('/api/services').send(service)
|
||||
request(app).post('/api/services').send(service),
|
||||
);
|
||||
|
||||
const results = await Promise.all(deployPromises);
|
||||
@@ -167,7 +167,7 @@ describe('Integration Tests', () => {
|
||||
const bulkServices = [
|
||||
{ id: 'plex', name: 'Plex' },
|
||||
{ id: 'jellyfin', name: 'Jellyfin' },
|
||||
{ id: 'emby', name: 'Emby' }
|
||||
{ id: 'emby', name: 'Emby' },
|
||||
];
|
||||
|
||||
const importRes = await request(app)
|
||||
@@ -180,7 +180,7 @@ describe('Integration Tests', () => {
|
||||
const updatedServices = [
|
||||
{ id: 'plex', name: 'Plex', status: 'online' },
|
||||
{ id: 'jellyfin', name: 'Jellyfin' },
|
||||
{ id: 'emby', name: 'Emby' }
|
||||
{ id: 'emby', name: 'Emby' },
|
||||
];
|
||||
|
||||
await request(app).put('/api/services').send(updatedServices);
|
||||
@@ -219,7 +219,7 @@ describe('Integration Tests', () => {
|
||||
const config = {
|
||||
domain: 'example.local',
|
||||
theme: 'dark',
|
||||
enableHealthCheck: false
|
||||
enableHealthCheck: false,
|
||||
};
|
||||
|
||||
const configRes = await request(app)
|
||||
@@ -232,7 +232,7 @@ describe('Integration Tests', () => {
|
||||
const service = {
|
||||
id: 'test',
|
||||
name: 'Test Service',
|
||||
subdomain: 'test'
|
||||
subdomain: 'test',
|
||||
};
|
||||
|
||||
await request(app).post('/api/services').send(service);
|
||||
@@ -282,7 +282,7 @@ describe('Integration Tests', () => {
|
||||
const service = {
|
||||
id: firstTemplateId,
|
||||
name: singleTemplateRes.body.template.name,
|
||||
logo: singleTemplateRes.body.template.logo
|
||||
logo: singleTemplateRes.body.template.logo,
|
||||
};
|
||||
|
||||
const deployRes = await request(app)
|
||||
@@ -310,7 +310,7 @@ describe('Integration Tests', () => {
|
||||
name: 'Plex Production',
|
||||
logo: template.logo,
|
||||
port: 32400,
|
||||
subdomain: 'plex'
|
||||
subdomain: 'plex',
|
||||
};
|
||||
|
||||
const deployRes = await request(app)
|
||||
@@ -322,7 +322,7 @@ describe('Integration Tests', () => {
|
||||
// Verify service exists
|
||||
const servicesRes = await request(app).get('/api/services');
|
||||
expect(servicesRes.body).toContainEqual(
|
||||
expect.objectContaining({ id: 'plex-prod' })
|
||||
expect.objectContaining({ id: 'plex-prod' }),
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -367,7 +367,7 @@ describe('Integration Tests', () => {
|
||||
// Start with empty state
|
||||
const initialServices = [
|
||||
{ id: 'base1', name: 'Base 1' },
|
||||
{ id: 'base2', name: 'Base 2' }
|
||||
{ id: 'base2', name: 'Base 2' },
|
||||
];
|
||||
|
||||
await request(app).put('/api/services').send(initialServices);
|
||||
@@ -377,7 +377,7 @@ describe('Integration Tests', () => {
|
||||
request(app).post('/api/services').send({ id: 'new1', name: 'New 1' }),
|
||||
request(app).post('/api/services').send({ id: 'new2', name: 'New 2' }),
|
||||
request(app).delete('/api/services/base1'),
|
||||
request(app).post('/api/services').send({ id: 'new3', name: 'New 3' })
|
||||
request(app).post('/api/services').send({ id: 'new3', name: 'New 3' }),
|
||||
];
|
||||
|
||||
await Promise.all(operations);
|
||||
@@ -426,7 +426,7 @@ describe('Integration Tests', () => {
|
||||
const selectedApps = mediaApps.map(id => ({
|
||||
id,
|
||||
name: templates[id].name,
|
||||
logo: templates[id].logo
|
||||
logo: templates[id].logo,
|
||||
}));
|
||||
|
||||
// Step 3: Deploy all media apps
|
||||
@@ -451,7 +451,7 @@ describe('Integration Tests', () => {
|
||||
const config = {
|
||||
domain: 'homelab.local',
|
||||
theme: 'dark',
|
||||
enableHealthCheck: true
|
||||
enableHealthCheck: true,
|
||||
};
|
||||
|
||||
await request(app).post('/api/config').send(config);
|
||||
@@ -460,7 +460,7 @@ describe('Integration Tests', () => {
|
||||
const existingServices = [
|
||||
{ id: 'router', name: 'Router', logo: '/assets/router.png' },
|
||||
{ id: 'nas', name: 'NAS', logo: '/assets/nas.png' },
|
||||
{ id: 'pihole', name: 'Pi-hole', logo: '/assets/pihole.png' }
|
||||
{ id: 'pihole', name: 'Pi-hole', logo: '/assets/pihole.png' },
|
||||
];
|
||||
|
||||
await request(app).put('/api/services').send(existingServices);
|
||||
@@ -484,7 +484,7 @@ describe('Integration Tests', () => {
|
||||
const oldServices = [
|
||||
{ id: 'old1', name: 'Old Service 1' },
|
||||
{ id: 'old2', name: 'Old Service 2' },
|
||||
{ id: 'keep', name: 'Keep This' }
|
||||
{ id: 'keep', name: 'Keep This' },
|
||||
];
|
||||
|
||||
await request(app).put('/api/services').send(oldServices);
|
||||
|
||||
@@ -12,7 +12,7 @@ describe('logger-utils', () => {
|
||||
username: 'admin',
|
||||
password: 'secret123',
|
||||
apiKey: 'abc-def-ghi',
|
||||
token: 'xyz123'
|
||||
token: 'xyz123',
|
||||
};
|
||||
|
||||
const result = sanitizeForLog(input);
|
||||
@@ -29,9 +29,9 @@ describe('logger-utils', () => {
|
||||
name: 'Alice',
|
||||
credentials: {
|
||||
password: 'secret',
|
||||
token: 'abc123'
|
||||
}
|
||||
}
|
||||
token: 'abc123',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const result = sanitizeForLog(input);
|
||||
@@ -44,7 +44,7 @@ describe('logger-utils', () => {
|
||||
test('should handle arrays', () => {
|
||||
const input = [
|
||||
{ name: 'user1', password: 'pass1' },
|
||||
{ name: 'user2', secret: 'pass2' }
|
||||
{ name: 'user2', secret: 'pass2' },
|
||||
];
|
||||
|
||||
const result = sanitizeForLog(input);
|
||||
@@ -63,7 +63,7 @@ describe('logger-utils', () => {
|
||||
test('should support additional sensitive keys', () => {
|
||||
const input = {
|
||||
email: 'user@example.com',
|
||||
ssn: '123-45-6789'
|
||||
ssn: '123-45-6789',
|
||||
};
|
||||
|
||||
const result = sanitizeForLog(input, ['ssn']);
|
||||
@@ -76,7 +76,7 @@ describe('logger-utils', () => {
|
||||
const input = {
|
||||
PASSWORD: 'secret',
|
||||
ApiKey: 'key123',
|
||||
Bearer_Token: 'token456'
|
||||
Bearer_Token: 'token456',
|
||||
};
|
||||
|
||||
const result = sanitizeForLog(input);
|
||||
@@ -125,7 +125,7 @@ describe('logger-utils', () => {
|
||||
test('should create safe log object with message and sanitized data', () => {
|
||||
const result = safeLog('User login', {
|
||||
username: 'alice',
|
||||
password: 'secret123'
|
||||
password: 'secret123',
|
||||
});
|
||||
|
||||
expect(result).toHaveProperty('message', 'User login');
|
||||
|
||||
@@ -72,8 +72,8 @@ describe('Notification Routes', () => {
|
||||
.send({
|
||||
events: {
|
||||
containerDown: true,
|
||||
containerUp: false
|
||||
}
|
||||
containerUp: false,
|
||||
},
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
@@ -87,9 +87,9 @@ describe('Notification Routes', () => {
|
||||
providers: {
|
||||
discord: {
|
||||
enabled: true,
|
||||
webhookUrl: 'not-a-valid-url'
|
||||
}
|
||||
}
|
||||
webhookUrl: 'not-a-valid-url',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
@@ -102,9 +102,9 @@ describe('Notification Routes', () => {
|
||||
providers: {
|
||||
ntfy: {
|
||||
enabled: true,
|
||||
topic: 'invalid topic with spaces!!!'
|
||||
}
|
||||
}
|
||||
topic: 'invalid topic with spaces!!!',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
|
||||
@@ -27,7 +27,7 @@ function makeStat(cpu = 10, memory = 50, timestamp = new Date().toISOString()) {
|
||||
memory: { usage: memory * 1024 * 1024, limit: 1024 * 1024 * 1024, percent: memory, usageMB: memory, limitMB: 1024 },
|
||||
network: { rxBytes: 0, txBytes: 0, rxMB: 0, txMB: 0 },
|
||||
disk: { readBytes: 0, writeBytes: 0, readMB: 0, writeMB: 0 },
|
||||
pids: 5
|
||||
pids: 5,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -95,7 +95,7 @@ describe('getAggregatedStats', () => {
|
||||
const now = new Date().toISOString();
|
||||
resourceMonitor.stats.set('c1', {
|
||||
name: '/app',
|
||||
history: [makeStat(10, 50, now), makeStat(30, 50, now), makeStat(20, 50, now)]
|
||||
history: [makeStat(10, 50, now), makeStat(30, 50, now), makeStat(20, 50, now)],
|
||||
});
|
||||
const agg = resourceMonitor.getAggregatedStats('c1', 24);
|
||||
expect(agg.cpu.avg).toBe(20);
|
||||
@@ -107,7 +107,7 @@ describe('getAggregatedStats', () => {
|
||||
const now = new Date().toISOString();
|
||||
resourceMonitor.stats.set('c1', {
|
||||
name: '/app',
|
||||
history: [makeStat(10, 40, now), makeStat(10, 60, now), makeStat(10, 80, now)]
|
||||
history: [makeStat(10, 40, now), makeStat(10, 60, now), makeStat(10, 80, now)],
|
||||
});
|
||||
const agg = resourceMonitor.getAggregatedStats('c1', 24);
|
||||
expect(agg.memory.avg).toBe(60);
|
||||
@@ -239,7 +239,7 @@ describe('exportStats / importStats', () => {
|
||||
test('import restores stats from backup', () => {
|
||||
const backup = {
|
||||
stats: { 'c1': { name: '/app', history: [makeStat()] } },
|
||||
alerts: { 'c1': { enabled: true, cpuThreshold: 80 } }
|
||||
alerts: { 'c1': { enabled: true, cpuThreshold: 80 } },
|
||||
};
|
||||
resourceMonitor.importStats(backup);
|
||||
expect(resourceMonitor.stats.has('c1')).toBe(true);
|
||||
|
||||
@@ -150,7 +150,7 @@ describe('Sites Route Security', () => {
|
||||
.post('/api/site/external')
|
||||
.send({
|
||||
subdomain: 'test',
|
||||
externalUrl: 'https://evil.com/path{inject}'
|
||||
externalUrl: 'https://evil.com/path{inject}',
|
||||
});
|
||||
|
||||
// Should be rejected — either 400 (our validation) or 500 (URL constructor throws on {})
|
||||
@@ -164,7 +164,7 @@ describe('Sites Route Security', () => {
|
||||
.post('/api/site/external')
|
||||
.send({
|
||||
subdomain: 'test',
|
||||
externalUrl: 'https://evil.com/path\nreverse_proxy malicious:1234'
|
||||
externalUrl: 'https://evil.com/path\nreverse_proxy malicious:1234',
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
@@ -183,7 +183,7 @@ describe('Sites Route Security', () => {
|
||||
.post('/api/site/external')
|
||||
.send({
|
||||
subdomain: '../etc/passwd',
|
||||
externalUrl: 'https://example.com'
|
||||
externalUrl: 'https://example.com',
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
@@ -205,7 +205,7 @@ describe('Error Logs — No Stack Trace Leak', () => {
|
||||
'[2026-03-07 12:01:00] dns: DNS timeout',
|
||||
'Error: connect ECONNREFUSED 192.168.1.1:5380',
|
||||
' at TCPConnectWrap.afterConnect [as oncomplete] (node:net:1234:16)',
|
||||
'================================================================================'
|
||||
'================================================================================',
|
||||
].join('\n');
|
||||
// Write to the server's error log file location
|
||||
// The server uses ctx.ERROR_LOG_FILE — we need to check what that resolves to
|
||||
@@ -334,10 +334,10 @@ describe('Backup Security', () => {
|
||||
files: {
|
||||
encryptionKey: {
|
||||
type: 'text',
|
||||
content: 'malicious-key-data'
|
||||
}
|
||||
}
|
||||
}
|
||||
content: 'malicious-key-data',
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// The encryptionKey should be skipped (not in fileMapping)
|
||||
@@ -392,8 +392,8 @@ describe('Custom Volume Path Validation', () => {
|
||||
port: '32400',
|
||||
customVolumes: [{
|
||||
containerPath: '/config',
|
||||
hostPath: '/etc/shadow'
|
||||
}]
|
||||
hostPath: '/etc/shadow',
|
||||
}],
|
||||
});
|
||||
|
||||
// The deploy will likely fail for other reasons (no Docker, etc.)
|
||||
@@ -414,7 +414,7 @@ describe('Logo Delete Path Traversal', () => {
|
||||
// Write config with a malicious logo path
|
||||
const configWithMaliciousLogo = {
|
||||
customLogo: '/assets/../../etc/passwd',
|
||||
customLogoDark: '/assets/../../../root/.ssh/id_rsa'
|
||||
customLogoDark: '/assets/../../../root/.ssh/id_rsa',
|
||||
};
|
||||
await fsp.writeFile(testConfigFile, JSON.stringify(configWithMaliciousLogo), 'utf8');
|
||||
|
||||
@@ -439,7 +439,7 @@ describe('DNS Server SSRF Prevention', () => {
|
||||
.query({
|
||||
domain: 'test.sami',
|
||||
type: 'A',
|
||||
server: '169.254.169.254' // AWS metadata endpoint
|
||||
server: '169.254.169.254', // AWS metadata endpoint
|
||||
});
|
||||
|
||||
// Must never succeed — 400 (server rejected), 401 (no token), or 500 (dns not configured in test)
|
||||
@@ -452,7 +452,7 @@ describe('DNS Server SSRF Prevention', () => {
|
||||
.send({
|
||||
domain: 'test.sami',
|
||||
ipAddress: '192.168.1.1',
|
||||
server: '10.0.0.1' // Not a configured DNS server
|
||||
server: '10.0.0.1', // Not a configured DNS server
|
||||
});
|
||||
|
||||
expect(res.statusCode).not.toBe(200);
|
||||
@@ -463,7 +463,7 @@ describe('DNS Server SSRF Prevention', () => {
|
||||
.get('/api/dns/resolve')
|
||||
.query({
|
||||
domain: 'test.sami',
|
||||
server: '127.0.0.1'
|
||||
server: '127.0.0.1',
|
||||
});
|
||||
|
||||
expect(res.statusCode).not.toBe(200);
|
||||
@@ -503,7 +503,7 @@ describe('HTTP Fetch Response Size Limit', () => {
|
||||
test('server should define MAX_RESPONSE_SIZE constant', () => {
|
||||
// Read server.js and verify the limit is defined
|
||||
const serverSource = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'server.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'server.js'), 'utf8',
|
||||
);
|
||||
expect(serverSource).toContain('MAX_RESPONSE_SIZE');
|
||||
expect(serverSource).toContain('10 * 1024 * 1024');
|
||||
@@ -516,7 +516,7 @@ describe('HTTP Fetch Response Size Limit', () => {
|
||||
describe('Middleware Security', () => {
|
||||
test('middleware should set Secure flag on cookies', () => {
|
||||
const middlewareSource = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'middleware.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'middleware.js'), 'utf8',
|
||||
);
|
||||
// Verify the Set-Cookie string includes Secure
|
||||
expect(middlewareSource).toContain('; Secure;');
|
||||
@@ -529,7 +529,7 @@ describe('Middleware Security', () => {
|
||||
describe('Config Save Atomicity', () => {
|
||||
test('saveConfig should use state manager for locking', () => {
|
||||
const serverSource = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'server.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'server.js'), 'utf8',
|
||||
);
|
||||
// Verify saveConfig uses configStateManager.update (not raw fs.writeFile)
|
||||
expect(serverSource).toContain('configStateManager.update');
|
||||
@@ -542,7 +542,7 @@ describe('Config Save Atomicity', () => {
|
||||
describe('External URL Security', () => {
|
||||
test('sites.js should validate URL components for unsafe chars', () => {
|
||||
const sitesSource = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'sites.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'routes', 'sites.js'), 'utf8',
|
||||
);
|
||||
// Verify the unsafe character regex exists
|
||||
expect(sitesSource).toContain('unsafeCaddyChars');
|
||||
@@ -556,7 +556,7 @@ describe('External URL Security', () => {
|
||||
describe('Credential Manager File Locking', () => {
|
||||
test('credential-manager should use proper-lockfile', () => {
|
||||
const cmSource = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'credential-manager.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'credential-manager.js'), 'utf8',
|
||||
);
|
||||
expect(cmSource).toContain('proper-lockfile');
|
||||
expect(cmSource).toContain('_lockedUpdate');
|
||||
@@ -569,7 +569,7 @@ describe('Credential Manager File Locking', () => {
|
||||
describe('TOTP Config File Security', () => {
|
||||
test('loadTotpConfig should delete secret from file data', () => {
|
||||
const serverSource = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'server.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'server.js'), 'utf8',
|
||||
);
|
||||
// Verify the secret deletion exists in loadTotpConfig
|
||||
expect(serverSource).toContain('delete loaded.secret');
|
||||
@@ -577,7 +577,7 @@ describe('TOTP Config File Security', () => {
|
||||
|
||||
test('totp verify-setup should not write secret to config file', () => {
|
||||
const totpSource = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'auth', 'totp.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'routes', 'auth', 'totp.js'), 'utf8',
|
||||
);
|
||||
// Verify totpConfig.secret assignment is NOT present
|
||||
expect(totpSource).not.toContain('totpConfig.secret = pendingSecret');
|
||||
@@ -591,7 +591,7 @@ describe('TOTP Config File Security', () => {
|
||||
describe('Helpers — Volume Security', () => {
|
||||
test('helpers.js should validate hostPath against allowed roots', () => {
|
||||
const helpersSource = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'apps', 'helpers.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'routes', 'apps', 'helpers.js'), 'utf8',
|
||||
);
|
||||
expect(helpersSource).toContain('allowedRoots');
|
||||
expect(helpersSource).toContain('platformPaths.dockerData');
|
||||
@@ -605,7 +605,7 @@ describe('Helpers — Volume Security', () => {
|
||||
describe('Error Logs — Response Format', () => {
|
||||
test('errorlogs.js should not include details field', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'errorlogs.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'routes', 'errorlogs.js'), 'utf8',
|
||||
);
|
||||
// The parsed log object should only have timestamp, context, error
|
||||
// NOT details (which contains stack traces)
|
||||
@@ -622,7 +622,7 @@ describe('Error Logs — Response Format', () => {
|
||||
describe('Assets — Logo Path Safety', () => {
|
||||
test('assets.js should use path.basename for logo filename extraction', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'config', 'assets.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'routes', 'config', 'assets.js'), 'utf8',
|
||||
);
|
||||
expect(source).toContain('path.basename(logoPath)');
|
||||
// Should NOT use string replace for path extraction
|
||||
@@ -636,7 +636,7 @@ describe('Assets — Logo Path Safety', () => {
|
||||
describe('Backup — Encryption Key Exclusion', () => {
|
||||
test('backup.js should not include encryptionKey in filesToBackup', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'config', 'backup.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'routes', 'config', 'backup.js'), 'utf8',
|
||||
);
|
||||
// Should have a comment about deliberate exclusion
|
||||
expect(source).toContain('encryptionKey deliberately excluded');
|
||||
@@ -646,7 +646,7 @@ describe('Backup — Encryption Key Exclusion', () => {
|
||||
|
||||
test('backup.js restore fileMapping should not include encryptionKey', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'config', 'backup.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'routes', 'config', 'backup.js'), 'utf8',
|
||||
);
|
||||
// The RESTORE route's fileMapping (after "encryptionKey excluded" comment) must not have it
|
||||
// The preview route's fileMapping is allowed to have it (informational only)
|
||||
@@ -659,7 +659,7 @@ describe('Backup — Encryption Key Exclusion', () => {
|
||||
|
||||
test('backup.js should require TOTP for sensitive restores', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'config', 'backup.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'routes', 'config', 'backup.js'), 'utf8',
|
||||
);
|
||||
expect(source).toContain('sensitiveKeys');
|
||||
expect(source).toContain('totpCode');
|
||||
@@ -673,7 +673,7 @@ describe('Backup — Encryption Key Exclusion', () => {
|
||||
describe('DNS — Server Validation Function', () => {
|
||||
test('dns.js should define validateDnsServer', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'dns.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'routes', 'dns.js'), 'utf8',
|
||||
);
|
||||
expect(source).toContain('function validateDnsServer');
|
||||
expect(source).toContain('configuredIps');
|
||||
@@ -687,7 +687,7 @@ describe('DNS — Server Validation Function', () => {
|
||||
describe('Containers — Verified Container Access', () => {
|
||||
test('containers.js update route should use getVerifiedContainer', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'containers.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'routes', 'containers.js'), 'utf8',
|
||||
);
|
||||
// update and check-update should both use getVerifiedContainer
|
||||
const updateSection = source.substring(source.indexOf("'/:id/update'"));
|
||||
@@ -704,7 +704,7 @@ describe('Containers — Verified Container Access', () => {
|
||||
describe('Logs — Symlink Resolution', () => {
|
||||
test('logs.js should use realpath for symlink resolution', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'logs.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'routes', 'logs.js'), 'utf8',
|
||||
);
|
||||
expect(source).toContain('fsp.realpath');
|
||||
expect(source).toContain('path.sep');
|
||||
@@ -712,7 +712,7 @@ describe('Logs — Symlink Resolution', () => {
|
||||
|
||||
test('logs.js container routes should verify container exists', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'logs.js'), 'utf8'
|
||||
path.join(__dirname, '..', 'routes', 'logs.js'), 'utf8',
|
||||
);
|
||||
// Both container/:id and stream/:id should have inspect + NotFoundError
|
||||
expect(source).toContain('container.inspect()');
|
||||
|
||||
@@ -85,7 +85,7 @@ describe('Sites Routes', () => {
|
||||
.send({
|
||||
subdomain: 'INVALID SUBDOMAIN!',
|
||||
targetUrl: 'https://example.com',
|
||||
name: 'Test'
|
||||
name: 'Test',
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
|
||||
@@ -29,7 +29,7 @@ describe('StateManager', () => {
|
||||
stateManager = new StateManager(testFile, {
|
||||
lockRetries: 20,
|
||||
lockRetryInterval: 50,
|
||||
lockTimeout: 15000
|
||||
lockTimeout: 15000,
|
||||
});
|
||||
});
|
||||
|
||||
@@ -53,7 +53,7 @@ describe('StateManager', () => {
|
||||
test('write and read roundtrip', async () => {
|
||||
const testData = [
|
||||
{ id: '1', name: 'Test Service 1' },
|
||||
{ id: '2', name: 'Test Service 2' }
|
||||
{ id: '2', name: 'Test Service 2' },
|
||||
];
|
||||
|
||||
await stateManager.write(testData);
|
||||
@@ -88,7 +88,7 @@ describe('StateManager', () => {
|
||||
await stateManager.write([
|
||||
{ id: '1', name: 'Service 1' },
|
||||
{ id: '2', name: 'Service 2' },
|
||||
{ id: '3', name: 'Service 3' }
|
||||
{ id: '3', name: 'Service 3' },
|
||||
]);
|
||||
|
||||
await stateManager.removeItem('2');
|
||||
@@ -100,7 +100,7 @@ describe('StateManager', () => {
|
||||
|
||||
test('updateItem updates by ID', async () => {
|
||||
await stateManager.write([
|
||||
{ id: '1', name: 'Service 1', status: 'offline' }
|
||||
{ id: '1', name: 'Service 1', status: 'offline' },
|
||||
]);
|
||||
|
||||
await stateManager.updateItem('1', { status: 'online' });
|
||||
@@ -130,7 +130,7 @@ describe('StateManager', () => {
|
||||
stateManager.update(items => {
|
||||
items.push({ id: `service-${i}`, name: `Service ${i}` });
|
||||
return items;
|
||||
})
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -187,7 +187,7 @@ describe('StateManager', () => {
|
||||
await expect(
|
||||
stateManager.update(() => {
|
||||
throw new Error('Test error');
|
||||
})
|
||||
}),
|
||||
).rejects.toThrow('Test error');
|
||||
});
|
||||
});
|
||||
@@ -229,7 +229,7 @@ describe('StateManager', () => {
|
||||
id: `service-${i}`,
|
||||
name: `Service ${i}`,
|
||||
url: `https://service-${i}.example.com`,
|
||||
status: 'online'
|
||||
status: 'online',
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -123,7 +123,7 @@ describe('configureAutoUpdate', () => {
|
||||
updateManager.configureAutoUpdate('c1', {
|
||||
enabled: true,
|
||||
schedule: 'daily',
|
||||
securityOnly: true
|
||||
securityOnly: true,
|
||||
});
|
||||
expect(updateManager.config.autoUpdate['c1'].schedule).toBe('daily');
|
||||
expect(updateManager.config.autoUpdate['c1'].securityOnly).toBe(true);
|
||||
|
||||
File diff suppressed because it is too large
@@ -111,7 +111,7 @@ class AuditLogger {
|
||||
action: action || '',
|
||||
resource: resource || '',
|
||||
details: details || {},
|
||||
outcome: outcome || 'unknown'
|
||||
outcome: outcome || 'unknown',
|
||||
};
|
||||
|
||||
await this.stateManager.update(entries => {
|
||||
|
||||
@@ -40,10 +40,10 @@ class AuthManager {
|
||||
{
|
||||
...payload,
|
||||
iat: Math.floor(Date.now() / 1000),
|
||||
scope: payload.scope || ['read', 'write']
|
||||
scope: payload.scope || ['read', 'write'],
|
||||
},
|
||||
JWT_SECRET,
|
||||
{ expiresIn }
|
||||
{ expiresIn },
|
||||
);
|
||||
|
||||
// SECURITY: Log event only, never log the actual token
|
||||
@@ -67,7 +67,7 @@ class AuthManager {
|
||||
userId: decoded.sub,
|
||||
scope: decoded.scope || [],
|
||||
iat: decoded.iat,
|
||||
exp: decoded.exp
|
||||
exp: decoded.exp,
|
||||
};
|
||||
} catch (error) {
|
||||
if (error.name === 'TokenExpiredError') {
|
||||
@@ -111,7 +111,7 @@ class AuthManager {
|
||||
name,
|
||||
scopes,
|
||||
createdAt: new Date().toISOString(),
|
||||
lastUsed: null
|
||||
lastUsed: null,
|
||||
};
|
||||
|
||||
const metadataKey = `${API_KEY_METADATA_NAMESPACE}.${keyId}`;
|
||||
@@ -128,7 +128,7 @@ class AuthManager {
|
||||
id: keyId,
|
||||
name,
|
||||
scopes,
|
||||
createdAt: metadata.createdAt
|
||||
createdAt: metadata.createdAt,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('[AuthManager] API key generation failed:', error.message);
|
||||
@@ -179,7 +179,7 @@ class AuthManager {
|
||||
|
||||
// Update last used timestamp (non-blocking)
|
||||
this.updateLastUsed(keyId, metadata).catch(err =>
|
||||
console.error(`[AuthManager] Failed to update lastUsed for ${keyId}:`, err.message)
|
||||
console.error(`[AuthManager] Failed to update lastUsed for ${keyId}:`, err.message),
|
||||
);
|
||||
|
||||
console.log(`[AuthManager] API key verified: ${metadata.name} (${keyId})`);
|
||||
@@ -187,7 +187,7 @@ class AuthManager {
|
||||
return {
|
||||
keyId,
|
||||
scopes: metadata.scopes || [],
|
||||
name: metadata.name
|
||||
name: metadata.name,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('[AuthManager] API key verification failed:', error.message);
|
||||
@@ -282,7 +282,7 @@ class AuthManager {
|
||||
try {
|
||||
const updatedMetadata = {
|
||||
...metadata,
|
||||
lastUsed: new Date().toISOString()
|
||||
lastUsed: new Date().toISOString(),
|
||||
};
|
||||
|
||||
const metadataKey = `${API_KEY_METADATA_NAMESPACE}.${keyId}`;
|
||||
|
||||
@@ -165,7 +165,7 @@ class BackupManager extends EventEmitter {
|
||||
locations: savedLocations,
|
||||
encrypted: !!backup.encrypt,
|
||||
compressed: true,
|
||||
status: 'success'
|
||||
status: 'success',
|
||||
};
|
||||
|
||||
this.addToHistory(historyEntry);
|
||||
@@ -187,7 +187,7 @@ class BackupManager extends EventEmitter {
|
||||
timestamp: new Date().toISOString(),
|
||||
duration,
|
||||
status: 'failed',
|
||||
error: error.message
|
||||
error: error.message,
|
||||
};
|
||||
|
||||
this.addToHistory(historyEntry);
|
||||
@@ -205,7 +205,7 @@ class BackupManager extends EventEmitter {
|
||||
version: '1.0',
|
||||
timestamp: new Date().toISOString(),
|
||||
hostname: require('os').hostname(),
|
||||
data: {}
|
||||
data: {},
|
||||
};
|
||||
|
||||
for (const source of include) {
|
||||
@@ -332,10 +332,10 @@ class BackupManager extends EventEmitter {
|
||||
HostConfig: {
|
||||
Binds: [
|
||||
`${volumeName}:/volume:ro`,
|
||||
`${backupDir}:/backup`
|
||||
`${backupDir}:/backup`,
|
||||
],
|
||||
AutoRemove: true
|
||||
}
|
||||
AutoRemove: true,
|
||||
},
|
||||
});
|
||||
|
||||
// Start and wait for completion
|
||||
@@ -354,7 +354,7 @@ class BackupManager extends EventEmitter {
|
||||
path: backupFile,
|
||||
size: stats.size,
|
||||
timestamp: new Date().toISOString(),
|
||||
status: 'success'
|
||||
status: 'success',
|
||||
});
|
||||
}
|
||||
} catch (volumeError) {
|
||||
@@ -362,7 +362,7 @@ class BackupManager extends EventEmitter {
|
||||
backupResults.push({
|
||||
name: volume.Name,
|
||||
status: 'failed',
|
||||
error: volumeError.message
|
||||
error: volumeError.message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -371,7 +371,7 @@ class BackupManager extends EventEmitter {
|
||||
timestamp: new Date().toISOString(),
|
||||
totalVolumes: volumes.length,
|
||||
successCount: backupResults.filter(r => r.status === 'success').length,
|
||||
volumes: backupResults
|
||||
volumes: backupResults,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('[BackupManager] Error backing up volumes:', error.message);
|
||||
@@ -425,10 +425,10 @@ class BackupManager extends EventEmitter {
|
||||
HostConfig: {
|
||||
Binds: [
|
||||
`${volumeName}:/volume`,
|
||||
`${backupDir}:/backup:ro`
|
||||
`${backupDir}:/backup:ro`,
|
||||
],
|
||||
AutoRemove: true
|
||||
}
|
||||
AutoRemove: true,
|
||||
},
|
||||
});
|
||||
|
||||
await container.start();
|
||||
@@ -442,7 +442,7 @@ class BackupManager extends EventEmitter {
|
||||
restoreResults.push({
|
||||
name: volumeName,
|
||||
status: 'success',
|
||||
timestamp: new Date().toISOString()
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
|
||||
console.log(`[BackupManager] Volume ${volumeName} restored successfully`);
|
||||
@@ -451,7 +451,7 @@ class BackupManager extends EventEmitter {
|
||||
restoreResults.push({
|
||||
name: volBackup.name,
|
||||
status: 'failed',
|
||||
error: restoreError.message
|
||||
error: restoreError.message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -460,7 +460,7 @@ class BackupManager extends EventEmitter {
|
||||
timestamp: new Date().toISOString(),
|
||||
results: restoreResults,
|
||||
successCount: restoreResults.filter(r => r.status === 'success').length,
|
||||
failedCount: restoreResults.filter(r => r.status === 'failed').length
|
||||
failedCount: restoreResults.filter(r => r.status === 'failed').length,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -498,7 +498,7 @@ class BackupManager extends EventEmitter {
|
||||
|
||||
// Return: iv:authTag:encrypted (all base64)
|
||||
return Buffer.from(
|
||||
iv.toString('base64') + ':' + authTag.toString('base64') + ':' + encrypted.toString('base64')
|
||||
`${iv.toString('base64') }:${ authTag.toString('base64') }:${ encrypted.toString('base64')}`,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -566,7 +566,7 @@ class BackupManager extends EventEmitter {
|
||||
return {
|
||||
type: 'local',
|
||||
path: filepath,
|
||||
size: data.length
|
||||
size: data.length,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -652,7 +652,7 @@ class BackupManager extends EventEmitter {
|
||||
this.emit('restore-complete', {
|
||||
backupId,
|
||||
restored,
|
||||
timestamp: new Date().toISOString()
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
|
||||
console.log('[BackupManager] Restore completed successfully');
|
||||
@@ -661,7 +661,7 @@ class BackupManager extends EventEmitter {
|
||||
this.emit('restore-failed', {
|
||||
backupId,
|
||||
error: error.message,
|
||||
timestamp: new Date().toISOString()
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
@@ -790,7 +790,7 @@ class BackupManager extends EventEmitter {
|
||||
|
||||
return {
|
||||
backups: {},
|
||||
defaultRetention: { keep: 7 }
|
||||
defaultRetention: { keep: 7 },
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@ const CACHE_CONFIGS = {
|
||||
max: 500, // Max 500 different services
|
||||
ttl: 60 * 60 * 1000, // 1 hour TTL
|
||||
updateAgeOnGet: true, // Refresh TTL on access
|
||||
ttlAutopurge: true // Auto-cleanup expired entries
|
||||
ttlAutopurge: true, // Auto-cleanup expired entries
|
||||
},
|
||||
|
||||
// IP-based router sessions (Frontier NVG468MQ)
|
||||
@@ -21,7 +21,7 @@ const CACHE_CONFIGS = {
|
||||
max: 1000, // Support up to 1000 IP addresses
|
||||
ttl: 24 * 60 * 60 * 1000, // 24 hour TTL
|
||||
updateAgeOnGet: true,
|
||||
ttlAutopurge: true
|
||||
ttlAutopurge: true,
|
||||
},
|
||||
|
||||
// DNS server authentication tokens (Technitium)
|
||||
@@ -29,7 +29,7 @@ const CACHE_CONFIGS = {
|
||||
max: 50, // Max 50 DNS servers
|
||||
ttl: 6 * 60 * 60 * 1000, // 6 hour TTL (matches SESSION_TTL.DNS_TOKEN)
|
||||
updateAgeOnGet: false, // Don't refresh - tokens have fixed expiry
|
||||
ttlAutopurge: true
|
||||
ttlAutopurge: true,
|
||||
},
|
||||
|
||||
// Tailscale network status
|
||||
@@ -37,7 +37,7 @@ const CACHE_CONFIGS = {
|
||||
max: 1, // Only one status object
|
||||
ttl: 60 * 1000, // 1 minute TTL
|
||||
updateAgeOnGet: false,
|
||||
ttlAutopurge: true
|
||||
ttlAutopurge: true,
|
||||
},
|
||||
|
||||
// Tailscale API responses (devices, ACLs)
|
||||
@@ -45,8 +45,8 @@ const CACHE_CONFIGS = {
|
||||
max: 5, // devices + ACL + misc
|
||||
ttl: 5 * 60 * 1000, // 5 min (matches sync interval)
|
||||
updateAgeOnGet: false,
|
||||
ttlAutopurge: true
|
||||
}
|
||||
ttlAutopurge: true,
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
|
||||
@@ -17,15 +17,15 @@ const colors = {
yellow: '\x1b[33m',
blue: '\x1b[34m',
cyan: '\x1b[36m',
magenta: '\x1b[35m'
magenta: '\x1b[35m',
};

let testResults = {
const testResults = {
passed: 0,
failed: 0,
warnings: 0,
total: 0,
details: []
details: [],
};

function log(message, color = 'reset') {
@@ -62,7 +62,7 @@ async function makeRequest(path, options = {}) {
path: url.pathname + url.search,
method: options.method || 'GET',
headers: options.headers || {},
timeout: options.timeout || 10000
timeout: options.timeout || 10000,
};

const req = http.request(requestOptions, (res) => {
@@ -74,7 +74,7 @@ async function makeRequest(path, options = {}) {
headers: res.headers,
body: data,
data: data && (data.startsWith('{') || data.startsWith('[')) ?
(() => { try { return JSON.parse(data); } catch(e) { return null; } })() : data
(() => { try { return JSON.parse(data); } catch(e) { return null; } })() : data,
});
});
});
@@ -143,7 +143,7 @@ async function testCSRFProtection() {
const response = await makeRequest('/api/test-endpoint', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: { test: 'data' }
body: { test: 'data' },
});

if (response.data?.error?.includes('CSRF') || response.data?.message?.includes('CSRF')) {
@@ -183,7 +183,7 @@ async function testRequestSizeLimits() {
const response = await makeRequest('/api/services', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(smallPayload)
body: JSON.stringify(smallPayload),
});

if (response.statusCode !== 413) {
@@ -465,7 +465,7 @@ async function runAllTests() {
.forEach(t => log(` ⚠ ${t.name}: ${t.message}`, 'yellow'));
}

log('\n' + '═'.repeat(60), 'cyan');
log(`\n${ '═'.repeat(60)}`, 'cyan');

if (testResults.failed === 0) {
log('\n✅ ALL AUTOMATED TESTS PASSED!', 'green');

@@ -6,7 +6,7 @@
const VALID_TIMEZONES_SAMPLE = [
'UTC', 'America/New_York', 'America/Chicago', 'America/Denver', 'America/Los_Angeles',
'Europe/London', 'Europe/Paris', 'Europe/Berlin', 'Asia/Tokyo', 'Asia/Shanghai',
'Asia/Singapore', 'Australia/Sydney', 'Pacific/Auckland'
'Asia/Singapore', 'Australia/Sydney', 'Pacific/Auckland',
];

/**
@@ -27,7 +27,7 @@ function validateConfig(config) {
|
||||
if (typeof config.tld !== 'string') {
|
||||
errors.push('tld must be a string');
|
||||
} else {
|
||||
const tld = config.tld.startsWith('.') ? config.tld : '.' + config.tld;
|
||||
const tld = config.tld.startsWith('.') ? config.tld : `.${ config.tld}`;
|
||||
if (!/^\.[a-z0-9][a-z0-9-]*$/.test(tld)) {
|
||||
errors.push(`tld "${config.tld}" contains invalid characters (use lowercase alphanumeric)`);
|
||||
}
|
||||
@@ -117,7 +117,7 @@ function validateConfig(config) {
|
||||
'setupComplete', 'setupCompleted', 'setupMode', 'onboardingCompleted',
|
||||
'configurationType', 'defaults', 'customLogo', 'customFavicon',
|
||||
'dashboardTitle', 'tailscale', 'license', 'skipped',
|
||||
'routingMode', 'domain', 'email', 'defaultIP'
|
||||
'routingMode', 'domain', 'email', 'defaultIP',
|
||||
];
|
||||
for (const key of Object.keys(config)) {
|
||||
if (!knownKeys.includes(key)) {
|
||||
|
||||
@@ -105,7 +105,7 @@ const DOCKER = {
|
||||
TIMEOUT: 30000, // 30s — timeout for docker pull/create operations
|
||||
LOG_CONFIG: {
|
||||
Type: 'json-file',
|
||||
Config: { 'max-size': '10m', 'max-file': '3' } // 30MB max per container
|
||||
Config: { 'max-size': '10m', 'max-file': '3' }, // 30MB max per container
|
||||
},
|
||||
MAINTENANCE: {
|
||||
INTERVAL: 24 * 60 * 60 * 1000, // 24 hours
|
||||
|
||||
@@ -19,7 +19,7 @@ class CredentialManager {
|
||||
this.CACHE_TTL_MS = 5 * 60 * 1000; // 5 minutes
|
||||
this.lockOptions = {
|
||||
retries: { retries: 10, minTimeout: 100, maxTimeout: 300 },
|
||||
stale: 30000
|
||||
stale: 30000,
|
||||
};
|
||||
|
||||
console.log(`[CredentialManager] Initialized with ${this.useKeychain ? 'OS keychain' : 'encrypted file'} storage`);
|
||||
@@ -185,7 +185,7 @@ class CredentialManager {
|
||||
const value = credentials[key].value;
|
||||
decryptedEntries[key] = {
|
||||
plaintext: cryptoUtils.isEncrypted(value) ? cryptoUtils.decrypt(value) : value,
|
||||
metadata: credentials[key].metadata
|
||||
metadata: credentials[key].metadata,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -198,7 +198,7 @@ class CredentialManager {
|
||||
rotated[key] = {
|
||||
value: cryptoUtils.encrypt(decryptedEntries[key].plaintext),
|
||||
metadata: decryptedEntries[key].metadata,
|
||||
rotatedAt: new Date().toISOString()
|
||||
rotatedAt: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -303,7 +303,7 @@ class CredentialManager {
|
||||
credentials[key] = {
|
||||
value: cryptoUtils.encrypt(value),
|
||||
metadata,
|
||||
updatedAt: new Date().toISOString()
|
||||
updatedAt: new Date().toISOString(),
|
||||
};
|
||||
return credentials;
|
||||
});
|
||||
@@ -360,7 +360,7 @@ class CredentialManager {
|
||||
const backup = {
|
||||
version: '1.0',
|
||||
exportedAt: new Date().toISOString(),
|
||||
credentials
|
||||
credentials,
|
||||
};
|
||||
return cryptoUtils.encrypt(JSON.stringify(backup));
|
||||
}
|
||||
|
||||
@@ -336,5 +336,5 @@ module.exports = {
deriveKey,
rotateKey,
decryptWithKey,
clearCachedKey
clearCachedKey,
};

@@ -68,7 +68,7 @@ function csrfCookieMiddleware(req, res, next) {
|
||||
secure: req.secure || req.protocol === 'https', // Only secure in HTTPS
|
||||
sameSite: 'strict',
|
||||
path: '/',
|
||||
maxAge: 24 * 60 * 60 * 1000 // 24 hours
|
||||
maxAge: 24 * 60 * 60 * 1000, // 24 hours
|
||||
});
|
||||
|
||||
next();
|
||||
@@ -96,7 +96,7 @@ function csrfValidationMiddleware(req, res, next) {
|
||||
'/api/totp/verify',
|
||||
'/api/totp/verify-setup',
|
||||
'/health',
|
||||
'/api/health'
|
||||
'/api/health',
|
||||
];
|
||||
|
||||
// Check if path starts with excluded prefix
|
||||
@@ -126,7 +126,7 @@ function csrfValidationMiddleware(req, res, next) {
|
||||
return res.status(403).json({
|
||||
success: false,
|
||||
error: '[DC-100] CSRF token missing',
|
||||
message: 'CSRF cookie not found. Please refresh the page (Ctrl+Shift+R) and try again.'
|
||||
message: 'CSRF cookie not found. Please refresh the page (Ctrl+Shift+R) and try again.',
|
||||
});
|
||||
}
|
||||
|
||||
@@ -135,7 +135,7 @@ function csrfValidationMiddleware(req, res, next) {
|
||||
return res.status(403).json({
|
||||
success: false,
|
||||
error: '[DC-100] CSRF token missing',
|
||||
message: 'CSRF token not provided in request headers. Please refresh the page (Ctrl+Shift+R) and try again.'
|
||||
message: 'CSRF token not provided in request headers. Please refresh the page (Ctrl+Shift+R) and try again.',
|
||||
});
|
||||
}
|
||||
|
||||
@@ -161,7 +161,7 @@ function csrfValidationMiddleware(req, res, next) {
|
||||
return res.status(403).json({
|
||||
success: false,
|
||||
error: '[DC-101] CSRF token invalid',
|
||||
message: 'CSRF token validation failed. Please refresh the page (Ctrl+Shift+R) and try again.'
|
||||
message: 'CSRF token validation failed. Please refresh the page (Ctrl+Shift+R) and try again.',
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -174,5 +174,5 @@ module.exports = {
|
||||
signToken,
|
||||
parseCookie,
|
||||
csrfCookieMiddleware,
|
||||
csrfValidationMiddleware
|
||||
csrfValidationMiddleware,
|
||||
};
|
||||
|
||||
@@ -55,7 +55,7 @@ class DockerMaintenance extends EventEmitter {
|
||||
spaceReclaimed: { images: 0, containers: 0, buildCache: 0, total: 0 },
|
||||
diskUsage: null,
|
||||
warnings: [],
|
||||
containersWithoutLogLimits: []
|
||||
containersWithoutLogLimits: [],
|
||||
};
|
||||
|
||||
try {
|
||||
@@ -72,7 +72,7 @@ class DockerMaintenance extends EventEmitter {
|
||||
try {
|
||||
const stopped = await docker.listContainers({
|
||||
all: true,
|
||||
filters: { status: ['exited', 'dead'] }
|
||||
filters: { status: ['exited', 'dead'] },
|
||||
});
|
||||
for (const c of stopped) {
|
||||
// Skip DashCaddy-managed containers — user may want to restart them
|
||||
@@ -108,20 +108,20 @@ class DockerMaintenance extends EventEmitter {
|
||||
result.diskUsage = {
|
||||
images: {
|
||||
count: (df.Images || []).length,
|
||||
sizeBytes: (df.Images || []).reduce((sum, i) => sum + (i.Size || 0), 0)
|
||||
sizeBytes: (df.Images || []).reduce((sum, i) => sum + (i.Size || 0), 0),
|
||||
},
|
||||
containers: {
|
||||
count: (df.Containers || []).length,
|
||||
sizeBytes: (df.Containers || []).reduce((sum, c) => sum + (c.SizeRw || 0), 0)
|
||||
sizeBytes: (df.Containers || []).reduce((sum, c) => sum + (c.SizeRw || 0), 0),
|
||||
},
|
||||
volumes: {
|
||||
count: (df.Volumes?.Volumes || []).length,
|
||||
sizeBytes: (df.Volumes?.Volumes || []).reduce((sum, v) => sum + (v.UsageData?.Size || 0), 0)
|
||||
sizeBytes: (df.Volumes?.Volumes || []).reduce((sum, v) => sum + (v.UsageData?.Size || 0), 0),
|
||||
},
|
||||
buildCache: {
|
||||
count: (df.BuildCache || []).length,
|
||||
sizeBytes: (df.BuildCache || []).reduce((sum, b) => sum + (b.Size || 0), 0)
|
||||
}
|
||||
sizeBytes: (df.BuildCache || []).reduce((sum, b) => sum + (b.Size || 0), 0),
|
||||
},
|
||||
};
|
||||
result.diskUsage.totalBytes =
|
||||
result.diskUsage.images.sizeBytes +
|
||||
@@ -149,7 +149,7 @@ class DockerMaintenance extends EventEmitter {
|
||||
if (!logConfig?.Config?.['max-size']) {
|
||||
result.containersWithoutLogLimits.push({
|
||||
name: c.Names[0]?.replace(/^\//, '') || c.Id.slice(0, 12),
|
||||
id: c.Id.slice(0, 12)
|
||||
id: c.Id.slice(0, 12),
|
||||
});
|
||||
}
|
||||
} catch (e) {
|
||||
@@ -158,7 +158,7 @@ class DockerMaintenance extends EventEmitter {
|
||||
}
|
||||
if (result.containersWithoutLogLimits.length > 0) {
|
||||
result.warnings.push(
|
||||
`${result.containersWithoutLogLimits.length} container(s) have no log rotation — restart or update them to apply log limits: ${result.containersWithoutLogLimits.map(c => c.name).join(', ')}`
|
||||
`${result.containersWithoutLogLimits.length} container(s) have no log rotation — restart or update them to apply log limits: ${result.containersWithoutLogLimits.map(c => c.name).join(', ')}`,
|
||||
);
|
||||
}
|
||||
} catch (e) {
|
||||
@@ -204,7 +204,7 @@ class DockerMaintenance extends EventEmitter {
|
||||
return {
|
||||
running: this.running,
|
||||
lastRun: this.lastRun,
|
||||
lastResult: this.lastResult
|
||||
lastResult: this.lastResult,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -39,7 +39,7 @@ class DockerSecurity {
|
||||
trustedDigests: {},
|
||||
verificationMode: VERIFICATION_MODE,
|
||||
allowUnverified: true,
|
||||
updateTrustedOnPull: true
|
||||
updateTrustedOnPull: true,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -124,7 +124,7 @@ class DockerSecurity {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Accept': 'application/vnd.docker.distribution.manifest.v2+json',
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
if (token) {
|
||||
@@ -198,7 +198,7 @@ class DockerSecurity {
|
||||
imageName,
|
||||
actualDigest,
|
||||
trustedDigest: trustedDigest || null,
|
||||
action: 'unknown'
|
||||
action: 'unknown',
|
||||
};
|
||||
|
||||
if (!trustedDigest) {
|
||||
@@ -280,7 +280,7 @@ class DockerSecurity {
|
||||
imageName,
|
||||
action: this.mode === 'permissive' ? 'accept' : 'warn',
|
||||
error: error.message,
|
||||
reason: `Verification error (${this.mode} mode)`
|
||||
reason: `Verification error (${this.mode} mode)`,
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -335,7 +335,7 @@ class DockerSecurity {
|
||||
mode: this.mode,
|
||||
trustedImagesCount: Object.keys(this.config.trustedDigests).length,
|
||||
configFile: SECURITY_CONFIG_FILE,
|
||||
updateTrustedOnPull: this.config.updateTrustedOnPull
|
||||
updateTrustedOnPull: this.config.updateTrustedOnPull,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -111,7 +111,7 @@ class HealthChecker extends EventEmitter {
|
||||
responseTime,
|
||||
statusCode: result.statusCode,
|
||||
message: result.message,
|
||||
details: result.details
|
||||
details: result.details,
|
||||
};
|
||||
|
||||
// Track consecutive failures for exponential backoff
|
||||
@@ -136,7 +136,7 @@ class HealthChecker extends EventEmitter {
|
||||
timestamp: new Date().toISOString(),
|
||||
status: 'down',
|
||||
responseTime,
|
||||
error: error.message
|
||||
error: error.message,
|
||||
};
|
||||
|
||||
this.recordStatus(serviceId, status);
|
||||
@@ -170,7 +170,7 @@ class HealthChecker extends EventEmitter {
|
||||
method,
|
||||
timeout: config.timeout || 20000,
|
||||
headers: config.headers || {},
|
||||
rejectUnauthorized: false // Trust internal CA certs (.sami TLD)
|
||||
rejectUnauthorized: false, // Trust internal CA certs (.sami TLD)
|
||||
};
|
||||
|
||||
const req = protocol.request(options, (res) => {
|
||||
@@ -189,8 +189,8 @@ class HealthChecker extends EventEmitter {
|
||||
message: healthy ? 'Service is healthy' : 'Service check failed',
|
||||
details: {
|
||||
headers: res.headers,
|
||||
bodyLength: data.length
|
||||
}
|
||||
bodyLength: data.length,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -306,7 +306,7 @@ class HealthChecker extends EventEmitter {
|
||||
const existing = this.incidents.find(i =>
|
||||
i.serviceId === serviceId &&
|
||||
i.type === type &&
|
||||
i.status === 'open'
|
||||
i.status === 'open',
|
||||
);
|
||||
|
||||
if (existing) {
|
||||
@@ -327,7 +327,7 @@ class HealthChecker extends EventEmitter {
|
||||
createdAt: status.timestamp,
|
||||
lastOccurrence: status.timestamp,
|
||||
occurrences: 1,
|
||||
details: status
|
||||
details: status,
|
||||
};
|
||||
|
||||
this.incidents.push(incident);
|
||||
@@ -343,7 +343,7 @@ class HealthChecker extends EventEmitter {
|
||||
const incident = this.incidents.find(i =>
|
||||
i.serviceId === serviceId &&
|
||||
i.type === type &&
|
||||
i.status === 'open'
|
||||
i.status === 'open',
|
||||
);
|
||||
|
||||
if (incident) {
|
||||
@@ -402,7 +402,7 @@ class HealthChecker extends EventEmitter {
|
||||
const history = this.history[serviceId] || [];
|
||||
|
||||
return history.filter(h =>
|
||||
new Date(h.timestamp).getTime() > cutoffTime
|
||||
new Date(h.timestamp).getTime() > cutoffTime,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -423,10 +423,10 @@ class HealthChecker extends EventEmitter {
|
||||
name: config?.name || serviceId,
|
||||
uptime: {
|
||||
'24h': uptime24h,
|
||||
'7d': uptime7d
|
||||
'7d': uptime7d,
|
||||
},
|
||||
avgResponseTime,
|
||||
sla: config?.sla
|
||||
sla: config?.sla,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -456,8 +456,8 @@ class HealthChecker extends EventEmitter {
|
||||
min: Math.min(...responseTimes),
|
||||
max: Math.max(...responseTimes),
|
||||
p95: this.calculatePercentile(responseTimes, 95),
|
||||
p99: this.calculatePercentile(responseTimes, 99)
|
||||
}
|
||||
p99: this.calculatePercentile(responseTimes, 99),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -504,7 +504,7 @@ class HealthChecker extends EventEmitter {
|
||||
slowResponseThreshold: config.slowResponseThreshold || 5000,
|
||||
sla: config.sla,
|
||||
headers: config.headers || {},
|
||||
body: config.body
|
||||
body: config.body,
|
||||
};
|
||||
|
||||
this.saveConfig();
|
||||
@@ -531,7 +531,7 @@ class HealthChecker extends EventEmitter {
|
||||
|
||||
for (const serviceId in this.history) {
|
||||
this.history[serviceId] = this.history[serviceId].filter(h =>
|
||||
new Date(h.timestamp).getTime() > cutoffTime
|
||||
new Date(h.timestamp).getTime() > cutoffTime,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -30,7 +30,7 @@ function validateDNSRecord(data) {
|
||||
if (!subdomainRegex.test(data.subdomain)) {
|
||||
errors.push({
|
||||
field: 'subdomain',
|
||||
message: 'Invalid subdomain format. Use only letters, numbers, and hyphens (1-63 chars)'
|
||||
message: 'Invalid subdomain format. Use only letters, numbers, and hyphens (1-63 chars)',
|
||||
});
|
||||
}
|
||||
|
||||
@@ -80,7 +80,7 @@ function validateDNSRecord(data) {
|
||||
subdomain: data.subdomain.toLowerCase().trim(),
|
||||
domain: data.domain ? data.domain.toLowerCase().trim() : null,
|
||||
ip: data.ip.trim(),
|
||||
ttl: data.ttl ? parseInt(data.ttl, 10) : 3600
|
||||
ttl: data.ttl ? parseInt(data.ttl, 10) : 3600,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -99,7 +99,7 @@ function validateDockerDeployment(data) {
|
||||
if (!nameRegex.test(data.name)) {
|
||||
errors.push({
|
||||
field: 'name',
|
||||
message: 'Invalid container name. Use only letters, numbers, underscores, periods, and hyphens'
|
||||
message: 'Invalid container name. Use only letters, numbers, underscores, periods, and hyphens',
|
||||
});
|
||||
}
|
||||
|
||||
@@ -119,7 +119,7 @@ function validateDockerDeployment(data) {
|
||||
if (!imageRegex.test(data.image)) {
|
||||
errors.push({
|
||||
field: 'image',
|
||||
message: 'Invalid Docker image format'
|
||||
message: 'Invalid Docker image format',
|
||||
});
|
||||
}
|
||||
|
||||
@@ -146,7 +146,7 @@ function validateDockerDeployment(data) {
|
||||
if (!portRegex.test(port)) {
|
||||
errors.push({
|
||||
field: `ports[${index}]`,
|
||||
message: 'Invalid port format. Use "host:container" or "host:container/protocol"'
|
||||
message: 'Invalid port format. Use "host:container" or "host:container/protocol"',
|
||||
});
|
||||
} else {
|
||||
const [, hostPort, containerPort] = port.match(portRegex);
|
||||
@@ -193,7 +193,7 @@ function validateDockerDeployment(data) {
|
||||
if (!envKeyRegex.test(key)) {
|
||||
errors.push({
|
||||
field: `environment.${key}`,
|
||||
message: 'Invalid environment variable name'
|
||||
message: 'Invalid environment variable name',
|
||||
});
|
||||
}
|
||||
|
||||
@@ -201,7 +201,7 @@ function validateDockerDeployment(data) {
|
||||
if (typeof value !== 'string' && typeof value !== 'number' && typeof value !== 'boolean') {
|
||||
errors.push({
|
||||
field: `environment.${key}`,
|
||||
message: 'Environment variable value must be string, number, or boolean'
|
||||
message: 'Environment variable value must be string, number, or boolean',
|
||||
});
|
||||
}
|
||||
});
|
||||
@@ -219,7 +219,7 @@ function validateDockerDeployment(data) {
|
||||
image: data.image.trim(),
|
||||
ports: data.ports || [],
|
||||
volumes: data.volumes || [],
|
||||
environment: data.environment || {}
|
||||
environment: data.environment || {},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -248,7 +248,7 @@ function validateFilePath(filePath, allowedBasePaths = []) {
|
||||
'C:\\Windows',
|
||||
'C:\\Program Files',
|
||||
'/var/run',
|
||||
'/var/lib/docker'
|
||||
'/var/lib/docker',
|
||||
];
|
||||
|
||||
const lowerPath = normalized.toLowerCase();
|
||||
@@ -284,7 +284,7 @@ function validateVolumePath(volume, index) {
|
||||
if (!match) {
|
||||
errors.push({
|
||||
field: `volumes[${index}]`,
|
||||
message: 'Invalid volume format. Use "host:container" or "host:container:mode"'
|
||||
message: 'Invalid volume format. Use "host:container" or "host:container:mode"',
|
||||
});
|
||||
return errors;
|
||||
}
|
||||
@@ -297,7 +297,7 @@ function validateVolumePath(volume, index) {
|
||||
} catch (error) {
|
||||
errors.push({
|
||||
field: `volumes[${index}].hostPath`,
|
||||
message: `Invalid host path: ${error.message}`
|
||||
message: `Invalid host path: ${error.message}`,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -305,7 +305,7 @@ function validateVolumePath(volume, index) {
|
||||
if (containerPath.includes('..') || !path.isAbsolute(containerPath)) {
|
||||
errors.push({
|
||||
field: `volumes[${index}].containerPath`,
|
||||
message: 'Container path must be absolute and not contain ..'
|
||||
message: 'Container path must be absolute and not contain ..',
|
||||
});
|
||||
}
|
||||
|
||||
@@ -313,7 +313,7 @@ function validateVolumePath(volume, index) {
|
||||
if (mode && !['ro', 'rw', 'z', 'Z'].includes(mode)) {
|
||||
errors.push({
|
||||
field: `volumes[${index}].mode`,
|
||||
message: 'Invalid volume mode. Use ro, rw, z, or Z'
|
||||
message: 'Invalid volume mode. Use ro, rw, z, or Z',
|
||||
});
|
||||
}
|
||||
|
||||
@@ -333,7 +333,7 @@ function validateURL(url, options = {}) {
|
||||
require_protocol: options.requireProtocol !== false,
|
||||
require_valid_protocol: true,
|
||||
allow_underscores: false,
|
||||
...options
|
||||
...options,
|
||||
};
|
||||
|
||||
if (!validator.isURL(url, validatorOptions)) {
|
||||
@@ -451,7 +451,7 @@ function isPrivateIP(ip) {
|
||||
/^169\.254\./,
|
||||
/^::1$/,
|
||||
/^fc00:/,
|
||||
/^fe80:/
|
||||
/^fe80:/,
|
||||
];
|
||||
|
||||
return privateRanges.some(range => range.test(ip));
|
||||
@@ -496,7 +496,7 @@ async function validateSecurePath(requestedPath, allowedRoots, auditLogger = nul
|
||||
auditLogger.logSecurityEvent('path_traversal_blocked', {
|
||||
requestedPath,
|
||||
reason: 'null_byte_detected',
|
||||
severity: 'high'
|
||||
severity: 'high',
|
||||
});
|
||||
}
|
||||
throw new ValidationError('Invalid path - null byte detected', 'path');
|
||||
@@ -510,7 +510,7 @@ async function validateSecurePath(requestedPath, allowedRoots, auditLogger = nul
|
||||
/\.\%2f/i, // .%2F (encoded ./)
|
||||
/%2e\./i, // %2E.
|
||||
/\.\\/, // .\ (Windows)
|
||||
/%5c/i // URL encoded backslash
|
||||
/%5c/i, // URL encoded backslash
|
||||
];
|
||||
|
||||
if (suspiciousPatterns.some(pattern => pattern.test(requestedPath)) ||
|
||||
@@ -520,7 +520,7 @@ async function validateSecurePath(requestedPath, allowedRoots, auditLogger = nul
|
||||
requestedPath,
|
||||
decodedPath,
|
||||
reason: 'traversal_sequence_detected',
|
||||
severity: 'high'
|
||||
severity: 'high',
|
||||
});
|
||||
}
|
||||
throw new ValidationError('Path traversal detected', 'path');
|
||||
@@ -581,7 +581,7 @@ async function validateSecurePath(requestedPath, allowedRoots, auditLogger = nul
|
||||
realPath,
|
||||
allowedRoots,
|
||||
reason: 'outside_allowed_roots',
|
||||
severity: 'critical'
|
||||
severity: 'critical',
|
||||
});
|
||||
}
|
||||
throw new ValidationError('Access denied - path is outside allowed directories', 'path');
|
||||
@@ -602,5 +602,5 @@ module.exports = {
sanitizeString,
isValidPort,
isPrivateIP,
validateSecurePath
validateSecurePath,
};

@@ -11,17 +11,17 @@ module.exports = {
'update-manager.js',
'resource-monitor.js',
'credential-manager.js',
'app-templates.js'
'app-templates.js',
],
coverageThreshold: {
global: {
branches: 80,
functions: 80,
lines: 80,
statements: 80
}
statements: 80,
},
},
setupFilesAfterEnv: ['<rootDir>/__tests__/jest.setup.js'],
restoreMocks: true,
clearMocks: true
clearMocks: true,
};

@@ -182,7 +182,7 @@ class KeychainManager {
|
||||
try {
|
||||
execFileSync('secret-tool', ['store', `--label=${SERVICE_NAME}:${account}`, 'service', SERVICE_NAME, 'account', account], {
|
||||
input: value,
|
||||
stdio: ['pipe', 'ignore', 'ignore']
|
||||
stdio: ['pipe', 'ignore', 'ignore'],
|
||||
});
|
||||
return true;
|
||||
} catch {
|
||||
|
||||
@@ -177,7 +177,7 @@ function verifyCode(secret, code) {
|
||||
codeId,
|
||||
createdAt: createdDate.toISOString(),
|
||||
expiresAt: isLifetime ? null : expiresDate.toISOString(),
|
||||
expired: isLifetime ? false : Date.now() > expiresDate.getTime()
|
||||
expired: isLifetime ? false : Date.now() > expiresDate.getTime(),
|
||||
};
|
||||
} catch (error) {
|
||||
return { valid: false, reason: error.message };
|
||||
@@ -230,7 +230,7 @@ Valid durations: ${VALID_DURATIONS.join(', ')} days
|
||||
const isLifetime = result.durationDays === 0;
|
||||
console.log('Code is VALID');
|
||||
console.log(` Version: ${result.version}`);
|
||||
console.log(` Duration: ${isLifetime ? 'LIFETIME' : result.durationDays + ' days'}`);
|
||||
console.log(` Duration: ${isLifetime ? 'LIFETIME' : `${result.durationDays } days`}`);
|
||||
console.log(` Code ID: ${result.codeId}`);
|
||||
console.log(` Created: ${result.createdAt}`);
|
||||
console.log(` Expires: ${isLifetime ? 'NEVER' : result.expiresAt}`);
|
||||
@@ -293,16 +293,16 @@ Valid durations: ${VALID_DURATIONS.join(', ')} days
|
||||
console.log(output);
|
||||
}
|
||||
} else {
|
||||
const lines = codes.map(c => `${c.code} (${c.durationDays === 0 ? 'LIFETIME' : c.durationDays + ' days'}, ID: ${c.codeId})`);
|
||||
const lines = codes.map(c => `${c.code} (${c.durationDays === 0 ? 'LIFETIME' : `${c.durationDays } days`}, ID: ${c.codeId})`);
|
||||
if (outputIndex !== -1) {
|
||||
fs.writeFileSync(args[outputIndex + 1], codes.map(c => c.code).join('\n') + '\n');
|
||||
fs.writeFileSync(args[outputIndex + 1], `${codes.map(c => c.code).join('\n') }\n`);
|
||||
console.log(`${count} code(s) written to ${args[outputIndex + 1]}`);
|
||||
} else {
|
||||
lines.forEach(l => console.log(l));
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\nGenerated ${count} code(s) for ${duration === 0 ? 'LIFETIME' : duration + ' days'}. Next ID: ${startId + count}`);
|
||||
console.log(`\nGenerated ${count} code(s) for ${duration === 0 ? 'LIFETIME' : `${duration } days`}. Next ID: ${startId + count}`);
|
||||
}
|
||||
|
||||
// Also export for use by license-manager.js
|
||||
|
||||
@@ -23,7 +23,7 @@ const LICENSE_SERVER_URL = process.env.LICENSE_SERVER_URL || null; // Set when l
const PREMIUM_FEATURES = {
sso: { name: 'Auto-Login SSO', description: 'Automatic single sign-on for deployed apps' },
recipes: { name: 'Recipes', description: 'Multi-container stack deployment' },
swarm: { name: 'Docker Swarm', description: 'Multi-node cluster orchestration' }
swarm: { name: 'Docker Swarm', description: 'Multi-node cluster orchestration' },
};

class LicenseManager {
@@ -48,13 +48,13 @@ class LicenseManager {
|
||||
if (this.isExpired()) {
|
||||
this.log.info?.('license', 'License has expired', {
|
||||
code: this._maskCode(this.activation.code),
|
||||
expiredAt: this.activation.expiresAt
|
||||
expiredAt: this.activation.expiresAt,
|
||||
});
|
||||
} else {
|
||||
this.log.info?.('license', 'License loaded', {
|
||||
code: this._maskCode(this.activation.code),
|
||||
expiresAt: this.activation.expiresAt,
|
||||
daysRemaining: this.daysRemaining()
|
||||
daysRemaining: this.daysRemaining(),
|
||||
});
|
||||
}
|
||||
} else {
|
||||
@@ -96,7 +96,7 @@ class LicenseManager {
|
||||
os.hostname(),
|
||||
os.platform(),
|
||||
os.arch(),
|
||||
os.cpus()[0]?.model || 'unknown'
|
||||
os.cpus()[0]?.model || 'unknown',
|
||||
];
|
||||
// Get primary MAC address
|
||||
const interfaces = os.networkInterfaces();
|
||||
@@ -132,7 +132,7 @@ class LicenseManager {
|
||||
return {
|
||||
success: true,
|
||||
message: 'This code is already activated',
|
||||
activation: this.getStatus()
|
||||
activation: this.getStatus(),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -170,7 +170,7 @@ class LicenseManager {
|
||||
expiresAt: expiresAt.toISOString(),
|
||||
machineId,
|
||||
validationMethod: 'offline',
|
||||
features: Object.keys(PREMIUM_FEATURES)
|
||||
features: Object.keys(PREMIUM_FEATURES),
|
||||
};
|
||||
} else {
|
||||
// Online validation succeeded — use server response
|
||||
@@ -182,7 +182,7 @@ class LicenseManager {
|
||||
try {
|
||||
await this.credentialManager.store(LICENSE_CRED_KEY, JSON.stringify(this.activation), {
|
||||
activatedAt: this.activation.activatedAt,
|
||||
expiresAt: this.activation.expiresAt
|
||||
expiresAt: this.activation.expiresAt,
|
||||
});
|
||||
} catch (error) {
|
||||
this.log.error?.('license', 'Failed to store activation', { error: error.message });
|
||||
@@ -196,14 +196,14 @@ class LicenseManager {
|
||||
code: this._maskCode(code),
|
||||
durationDays: this.activation.durationDays,
|
||||
expiresAt: this.activation.expiresAt,
|
||||
method: this.activation.validationMethod
|
||||
method: this.activation.validationMethod,
|
||||
});
|
||||
|
||||
const durationLabel = this.activation.lifetime ? 'lifetime' : `${this.activation.durationDays} days`;
|
||||
return {
|
||||
success: true,
|
||||
message: `License activated for ${durationLabel}`,
|
||||
activation: this.getStatus()
|
||||
activation: this.getStatus(),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -247,7 +247,7 @@ class LicenseManager {
|
||||
active: false,
|
||||
tier: 'free',
|
||||
features: [],
|
||||
premiumFeatures: PREMIUM_FEATURES
|
||||
premiumFeatures: PREMIUM_FEATURES,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -267,7 +267,7 @@ class LicenseManager {
|
||||
expired,
|
||||
features: expired ? [] : (this.activation.features || Object.keys(PREMIUM_FEATURES)),
|
||||
premiumFeatures: PREMIUM_FEATURES,
|
||||
validationMethod: this.activation.validationMethod
|
||||
validationMethod: this.activation.validationMethod,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -320,7 +320,7 @@ class LicenseManager {
|
||||
featureName: featureInfo.name,
|
||||
featureDescription: featureInfo.description,
|
||||
currentTier: this.isExpired() ? 'free' : 'expired',
|
||||
upgradeUrl: '/settings#license'
|
||||
upgradeUrl: '/settings#license',
|
||||
});
|
||||
};
|
||||
}
|
||||
@@ -359,7 +359,7 @@ class LicenseManager {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ code, machineId }),
|
||||
signal: AbortSignal.timeout(10000) // 10s timeout
|
||||
signal: AbortSignal.timeout(10000), // 10s timeout
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
@@ -379,8 +379,8 @@ class LicenseManager {
|
||||
expiresAt: data.expiresAt,
|
||||
machineId,
|
||||
features: data.features || Object.keys(PREMIUM_FEATURES),
|
||||
serverToken: data.token
|
||||
}
|
||||
serverToken: data.token,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -388,7 +388,7 @@ class LicenseManager {
|
||||
} catch (error) {
|
||||
// Server unreachable — return null to fallback to offline
|
||||
this.log.warn?.('license', 'License server unreachable, falling back to offline validation', {
|
||||
error: error.message
|
||||
error: error.message,
|
||||
});
|
||||
return null;
|
||||
}
|
||||
@@ -405,9 +405,9 @@ class LicenseManager {
|
||||
body: JSON.stringify({
|
||||
code: this.activation.code,
|
||||
machineId: this.activation.machineId,
|
||||
serverToken: this.activation.serverToken
|
||||
serverToken: this.activation.serverToken,
|
||||
}),
|
||||
signal: AbortSignal.timeout(10000)
|
||||
signal: AbortSignal.timeout(10000),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -431,7 +431,7 @@ class LicenseManager {
|
||||
tier: 'premium',
|
||||
expiresAt: this.activation.expiresAt,
|
||||
daysRemaining: this.daysRemaining(),
|
||||
features: this.activation.features || Object.keys(PREMIUM_FEATURES)
|
||||
features: this.activation.features || Object.keys(PREMIUM_FEATURES),
|
||||
};
|
||||
} else {
|
||||
config.license = { active: false, tier: 'free' };
|
||||
|
||||
@@ -18,12 +18,12 @@ const ERROR_PATTERNS = [
/\berror\b/i, /\bfailed\b/i, /\bfatal\b/i, /\bpanic\b/i,
/\bcrash(ed)?\b/i, /\bexception\b/i, /\btimeout\b/i,
/\bOOM\b/, /\bout of memory\b/i, /\bkilled\b/i,
/\bdenied\b/i, /\bunauthorized\b/i, /\brefused\b/i
/\bdenied\b/i, /\bunauthorized\b/i, /\brefused\b/i,
];

const WARNING_PATTERNS = [
/\bwarn(ing)?\b/i, /\bdeprecated\b/i, /\bretry(ing)?\b/i,
/\bslow\b/i, /\blatency\b/i
/\bslow\b/i, /\blatency\b/i,
];

const EVENT_PATTERNS = [
@@ -31,7 +31,7 @@ const EVENT_PATTERNS = [
|
||||
{ pattern: /\b(stop(ped|ping)?|shutdown|exit(ed|ing)?|terminat(ed|ing)?)\b/i, type: 'shutdown' },
|
||||
{ pattern: /\b(restart(ed|ing)?|reload(ed|ing)?)\b/i, type: 'restart' },
|
||||
{ pattern: /\bhealth.?check.*(fail|unhealthy)\b/i, type: 'health_failure' },
|
||||
{ pattern: /\b(update|upgrade|migration)\b/i, type: 'update' }
|
||||
{ pattern: /\b(update|upgrade|migration)\b/i, type: 'update' },
|
||||
];
|
||||
|
||||
class LogDigest extends EventEmitter {
|
||||
@@ -63,7 +63,7 @@ class LogDigest extends EventEmitter {
|
||||
// Collect logs every hour
|
||||
this.collectInterval = setInterval(() => {
|
||||
this._collectHourlyLogs().catch(e =>
|
||||
console.error('[LogDigest] Hourly collection failed:', e.message)
|
||||
console.error('[LogDigest] Hourly collection failed:', e.message),
|
||||
);
|
||||
}, DOCKER.DIGEST.COLLECT_INTERVAL);
|
||||
|
||||
@@ -102,7 +102,7 @@ class LogDigest extends EventEmitter {
|
||||
const hourSummary = {
|
signToken,
parseCookie,
csrfCookieMiddleware,
csrfValidationMiddleware
csrfValidationMiddleware,
};

|
||||
events: [],
|
||||
errorCount: 0,
|
||||
warningCount: 0,
|
||||
totalLines: 0
|
||||
totalLines: 0,
|
||||
};
|
||||
|
||||
if (isRunning) {
|
||||
@@ -134,7 +134,7 @@ class LogDigest extends EventEmitter {
|
||||
stderr: true,
|
||||
since: sinceTimestamp,
|
||||
tail: DOCKER.DIGEST.LOG_TAIL,
|
||||
timestamps: true
|
||||
timestamps: true,
|
||||
});
|
||||
|
||||
const lines = this._parseDockerLogs(logBuffer);
|
||||
@@ -147,7 +147,7 @@ class LogDigest extends EventEmitter {
|
||||
if (serviceSummary.errors.length < 10) {
|
||||
serviceSummary.errors.push({
|
||||
time: line.timestamp || hourKey,
|
||||
text: line.text.slice(0, 500)
|
||||
text: line.text.slice(0, 500),
|
||||
});
|
||||
}
|
||||
continue;
|
||||
@@ -159,7 +159,7 @@ class LogDigest extends EventEmitter {
|
||||
if (serviceSummary.warnings.length < 5) {
|
||||
serviceSummary.warnings.push({
|
||||
time: line.timestamp || hourKey,
|
||||
text: line.text.slice(0, 300)
|
||||
text: line.text.slice(0, 300),
|
||||
});
|
||||
}
|
||||
continue;
|
||||
@@ -171,7 +171,7 @@ class LogDigest extends EventEmitter {
|
||||
serviceSummary.events.push({
|
||||
type,
|
||||
time: line.timestamp || hourKey,
|
||||
text: line.text.slice(0, 300)
|
||||
text: line.text.slice(0, 300),
|
||||
});
|
||||
break;
|
||||
}
|
||||
@@ -180,7 +180,7 @@ class LogDigest extends EventEmitter {
|
||||
} catch (logErr) {
|
||||
serviceSummary.errors.push({
|
||||
time: now.toISOString(),
|
||||
text: `Failed to fetch logs: ${logErr.message}`
|
||||
text: `Failed to fetch logs: ${logErr.message}`,
|
||||
});
|
||||
serviceSummary.errorCount++;
|
||||
}
|
||||
@@ -188,7 +188,7 @@ class LogDigest extends EventEmitter {
|
||||
serviceSummary.events.push({
|
||||
type: 'not_running',
|
||||
time: now.toISOString(),
|
||||
text: `Container is ${containerInfo.State}`
|
||||
text: `Container is ${containerInfo.State}`,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -237,7 +237,7 @@ class LogDigest extends EventEmitter {
|
||||
lines.push({
|
||||
stream: streamType === 2 ? 'stderr' : 'stdout',
|
||||
text: message,
|
||||
timestamp
|
||||
timestamp,
|
||||
});
|
||||
}
|
||||
offset += 8 + size;
|
||||
@@ -258,7 +258,7 @@ class LogDigest extends EventEmitter {
|
||||
const delay = next.getTime() - now.getTime();
|
||||
this.digestTimeout = setTimeout(() => {
|
||||
this.generateDailyDigest().catch(e =>
|
||||
console.error('[LogDigest] Daily digest generation failed:', e.message)
|
||||
console.error('[LogDigest] Daily digest generation failed:', e.message),
|
||||
);
|
||||
// Reschedule for tomorrow
|
||||
if (this.running) this._scheduleDailyDigest();
|
||||
@@ -288,7 +288,7 @@ class LogDigest extends EventEmitter {
|
||||
totalLines: 0,
|
||||
lastState: svc.state,
|
||||
topErrors: [],
|
||||
events: []
|
||||
events: [],
|
||||
};
|
||||
}
|
||||
const agg = serviceAgg[appId];
|
||||
@@ -332,8 +332,8 @@ class LogDigest extends EventEmitter {
|
||||
totalServices: Object.keys(serviceAgg).length,
|
||||
servicesWithErrors: Object.values(serviceAgg).filter(s => s.totalErrors > 0).length,
|
||||
totalErrors: Object.values(serviceAgg).reduce((sum, s) => sum + s.totalErrors, 0),
|
||||
totalWarnings: Object.values(serviceAgg).reduce((sum, s) => sum + s.totalWarnings, 0)
|
||||
}
|
||||
totalWarnings: Object.values(serviceAgg).reduce((sum, s) => sum + s.totalWarnings, 0),
|
||||
},
|
||||
};
|
||||
|
||||
// Write formatted digest file
|
||||
@@ -369,7 +369,7 @@ class LogDigest extends EventEmitter {
|
||||
lines.push('');
|
||||
|
||||
// Service summary table
|
||||
lines.push('-- Service Summary ' + '-'.repeat(36));
|
||||
lines.push(`-- Service Summary ${ '-'.repeat(36)}`);
|
||||
const services = Object.values(digest.services);
|
||||
if (services.length === 0) {
|
||||
lines.push(' No managed services found.');
|
||||
@@ -387,14 +387,14 @@ class LogDigest extends EventEmitter {
|
||||
// Notable events
|
||||
const events = digest.notableEvents;
|
||||
if (events.length > 0) {
|
||||
lines.push('-- Notable Events ' + '-'.repeat(37));
|
||||
lines.push(`-- Notable Events ${ '-'.repeat(37)}`);
|
||||
for (const evt of events) {
|
||||
const time = (evt.time || '').slice(11, 16) || '??:??';
|
||||
lines.push(` [${time}] ${evt.service}: ${evt.text.slice(0, 80)}`);
|
||||
// Add guidance for where to look further
|
||||
const containerName = `${DOCKER.CONTAINER_PREFIX}${evt.appId}`;
|
||||
if (evt.type === 'health_failure' || evt.type === 'restart') {
|
||||
const sinceDate = digest.date + 'T' + (evt.time || '').slice(11, 13) + ':00:00';
|
||||
const sinceDate = `${digest.date }T${ (evt.time || '').slice(11, 13) }:00:00`;
|
||||
lines.push(` See: docker logs ${containerName} --since ${sinceDate}`);
|
||||
}
|
||||
}
|
||||
@@ -404,7 +404,7 @@ class LogDigest extends EventEmitter {
|
||||
// Top errors per service
|
||||
const errServices = services.filter(s => s.totalErrors > 0);
|
||||
if (errServices.length > 0) {
|
||||
lines.push('-- Error Details ' + '-'.repeat(38));
|
||||
lines.push(`-- Error Details ${ '-'.repeat(38)}`);
|
||||
for (const svc of errServices) {
|
||||
lines.push(` ${svc.name} (${svc.totalErrors} errors):`);
|
||||
for (const err of svc.topErrors) {
|
||||
@@ -419,7 +419,7 @@ class LogDigest extends EventEmitter {
|
||||
|
||||
// Docker disk usage
|
||||
if (digest.diskUsage) {
|
||||
lines.push('-- Docker Disk Usage ' + '-'.repeat(34));
|
||||
lines.push(`-- Docker Disk Usage ${ '-'.repeat(34)}`);
|
||||
const du = digest.diskUsage;
|
||||
lines.push(` Images: ${formatBytes(du.images.sizeBytes)} (${du.images.count} images)`);
|
||||
lines.push(` Containers: ${formatBytes(du.containers.sizeBytes)}`);
|
||||
@@ -439,7 +439,7 @@ class LogDigest extends EventEmitter {
|
||||
lines.push(` Hours collected: ${digest.hoursCollected}/24`);
|
||||
lines.push(hr);
|
||||
|
||||
return lines.join('\n') + '\n';
|
||||
return `${lines.join('\n') }\n`;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -551,7 +551,7 @@ class LogDigest extends EventEmitter {
|
||||
date: today,
|
||||
hoursCollected: todayHours.length,
|
||||
lastCollect: this.lastCollect,
|
||||
services: serviceAgg
|
||||
services: serviceAgg,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -560,7 +560,7 @@ class LogDigest extends EventEmitter {
|
||||
running: this.running,
|
||||
lastCollect: this.lastCollect,
|
||||
hourlySummaries: this.hourlySummaries.length,
|
||||
digestDir: this.digestDir
|
||||
digestDir: this.digestDir,
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -569,7 +569,7 @@ function formatBytes(bytes) {
|
||||
if (bytes === 0) return '0 B';
|
||||
const units = ['B', 'KB', 'MB', 'GB', 'TB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(1024));
|
||||
return (bytes / Math.pow(1024, i)).toFixed(1) + ' ' + units[i];
|
||||
return `${(bytes / Math.pow(1024, i)).toFixed(1) } ${ units[i]}`;
|
||||
}
|
||||
|
||||
module.exports = new LogDigest();
|
||||
|
||||
@@ -37,7 +37,7 @@ const SENSITIVE_FIELDS = [
|
||||
'masterKey',
|
||||
'master_key',
|
||||
'encryptionKey',
|
||||
'encryption_key'
|
||||
'encryption_key',
|
||||
];
|
||||
|
||||
/**
|
||||
@@ -116,7 +116,7 @@ function safeLog(message, data = {}, additionalSensitiveKeys = []) {
|
||||
return {
|
||||
message,
|
||||
data: sanitizeForLog(data, additionalSensitiveKeys),
|
||||
timestamp: new Date().toISOString()
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -124,5 +124,5 @@ module.exports = {
sanitizeForLog,
redactCredential,
safeLog,
SENSITIVE_FIELDS
SENSITIVE_FIELDS,
};

@@ -11,11 +11,11 @@ class Metrics {
|
||||
total: 0,
|
||||
byStatus: {},
|
||||
byMethod: {},
|
||||
byPath: {}
|
||||
byPath: {},
|
||||
};
|
||||
this.errors = {
|
||||
total: 0,
|
||||
byType: {}
|
||||
byType: {},
|
||||
};
|
||||
this.business = {
|
||||
containersDeployed: 0,
|
||||
@@ -26,7 +26,7 @@ class Metrics {
|
||||
totpLogins: 0,
|
||||
siteAdded: 0,
|
||||
siteRemoved: 0,
|
||||
credentialRotations: 0
|
||||
credentialRotations: 0,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -78,19 +78,19 @@ class Metrics {
|
||||
perSecond: uptimeSec > 0 ? +(this.requests.total / uptimeSec).toFixed(2) : 0,
|
||||
byStatus: this.requests.byStatus,
|
||||
byMethod: this.requests.byMethod,
|
||||
topEndpoints
|
||||
topEndpoints,
|
||||
},
|
||||
errors: {
|
||||
total: this.errors.total,
|
||||
rate: this.requests.total > 0 ? +((this.errors.total / this.requests.total) * 100).toFixed(2) : 0,
|
||||
byType: this.errors.byType
|
||||
byType: this.errors.byType,
|
||||
},
|
||||
business: this.business,
|
||||
process: {
|
||||
memory: process.memoryUsage(),
|
||||
pid: process.pid,
|
||||
nodeVersion: process.version
|
||||
}
|
||||
nodeVersion: process.version,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -27,7 +27,7 @@ const { CACHE_CONFIGS, createCache } = require('./cache-config');
|
||||
module.exports = function configureMiddleware(app, {
|
||||
siteConfig, totpConfig, tailscaleConfig,
|
||||
metrics, auditLogger, authManager, log, cryptoUtils,
|
||||
isValidContainerId, isTailscaleIP, getTailscaleStatus
|
||||
isValidContainerId, isTailscaleIP, getTailscaleStatus,
|
||||
}) {
|
||||
|
||||
// ── Container ID param validation ──
|
||||
@@ -44,7 +44,7 @@ module.exports = function configureMiddleware(app, {
|
||||
app.use(cors({
|
||||
origin: corsOrigins,
|
||||
methods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS'],
|
||||
credentials: true
|
||||
credentials: true,
|
||||
}));
|
||||
|
||||
// ── Security headers with Helmet ──
|
||||
@@ -54,16 +54,16 @@ module.exports = function configureMiddleware(app, {
defaultSrc: ["'self'"],
styleSrc: ["'self'"],
scriptSrc: ["'self'"],
imgSrc: ["'self'", "data:", "https:"],
imgSrc: ["'self'", 'data:', 'https:'],
connectSrc: ["'self'"],
fontSrc: ["'self'", "data:"],
fontSrc: ["'self'", 'data:'],
objectSrc: ["'none'"],
mediaSrc: ["'self'"],
frameSrc: ["'none'"]
}
frameSrc: ["'none'"],
},
},
crossOriginEmbedderPolicy: false,
crossOriginResourcePolicy: { policy: "cross-origin" }
crossOriginResourcePolicy: { policy: 'cross-origin' },
}));

// ── Trust proxy (one hop — Caddy) ──
@@ -95,7 +95,7 @@ module.exports = function configureMiddleware(app, {
|
||||
if (req.path !== '/health' && req.path !== '/api/health') {
|
||||
const level = res.statusCode >= 500 ? 'error' : res.statusCode >= 400 ? 'warn' : 'debug';
|
||||
log[level]('http', `${req.method} ${req.path} ${res.statusCode}`, {
|
||||
ms: duration, ip: req.ip, id: req.id
|
||||
ms: duration, ip: req.ip, id: req.id,
|
||||
});
|
||||
}
|
||||
});
|
||||
@@ -128,7 +128,7 @@ module.exports = function configureMiddleware(app, {
|
||||
success: false,
|
||||
error: '[DC-120] Access denied. This dashboard requires Tailscale connection.',
|
||||
requiresTailscale: true,
|
||||
clientIP: clientIP
|
||||
clientIP: clientIP,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -151,7 +151,7 @@ module.exports = function configureMiddleware(app, {
|
||||
success: false,
|
||||
error: '[DC-121] Access denied. Device not in allowed tailnet.',
|
||||
requiresTailscale: true,
|
||||
clientIP
|
||||
clientIP,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -178,7 +178,7 @@ module.exports = function configureMiddleware(app, {
|
||||
'8h': 8 * 60 * 60 * 1000,
|
||||
'12h': 12 * 60 * 60 * 1000,
|
||||
'24h': 24 * 60 * 60 * 1000,
|
||||
'never': null
|
||||
'never': null,
|
||||
};
|
||||
|
||||
// IP-based session store (solves cross-domain cookie issues with .sami TLD)
|
||||
@@ -222,7 +222,7 @@ module.exports = function configureMiddleware(app, {
|
||||
const key = cryptoUtils.loadOrCreateKey();
|
||||
const sig = crypto.createHmac('sha256', key).update(payloadB64).digest('base64url');
|
||||
res.setHeader('Set-Cookie',
|
||||
`${SESSION_COOKIE_NAME}=${payloadB64}.${sig}; Max-Age=${maxAge}; Path=/; HttpOnly; Secure; SameSite=Lax`
|
||||
`${SESSION_COOKIE_NAME}=${payloadB64}.${sig}; Max-Age=${maxAge}; Path=/; HttpOnly; Secure; SameSite=Lax`,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -254,7 +254,7 @@ module.exports = function configureMiddleware(app, {
|
||||
|
||||
function clearSessionCookie(res) {
|
||||
res.setHeader('Set-Cookie',
|
||||
`${SESSION_COOKIE_NAME}=; Max-Age=0; Path=/; HttpOnly; SameSite=Lax`
|
||||
`${SESSION_COOKIE_NAME}=; Max-Age=0; Path=/; HttpOnly; SameSite=Lax`,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -324,7 +324,7 @@ module.exports = function configureMiddleware(app, {
|
||||
if (req.totpSessionValid || isSessionValid(req)) {
|
||||
req.auth = {
|
||||
type: 'session',
|
||||
scope: ['admin']
|
||||
scope: ['admin'],
|
||||
};
|
||||
return next();
|
||||
}
|
||||
@@ -340,7 +340,7 @@ module.exports = function configureMiddleware(app, {
|
||||
req.auth = {
|
||||
type: 'jwt',
|
||||
userId: jwtPayload.userId,
|
||||
scope: jwtPayload.scope || []
|
||||
scope: jwtPayload.scope || [],
|
||||
};
|
||||
return next();
|
||||
}
|
||||
@@ -355,7 +355,7 @@ module.exports = function configureMiddleware(app, {
|
||||
type: 'apikey',
|
||||
keyId: keyData.keyId,
|
||||
name: keyData.name,
|
||||
scope: keyData.scopes || []
|
||||
scope: keyData.scopes || [],
|
||||
};
|
||||
return next();
|
||||
}
|
||||
@@ -364,7 +364,7 @@ module.exports = function configureMiddleware(app, {
|
||||
if (!totpConfig.enabled || totpConfig.sessionDuration === 'never') {
|
||||
req.auth = {
|
||||
type: 'none',
|
||||
scope: ['admin']
|
||||
scope: ['admin'],
|
||||
};
|
||||
return next();
|
||||
}
|
||||
@@ -372,7 +372,7 @@ module.exports = function configureMiddleware(app, {
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
error: '[DC-110] Authentication required - provide TOTP session, JWT token, or API key',
|
||||
requiresTotp: totpConfig.enabled
|
||||
requiresTotp: totpConfig.enabled,
|
||||
});
|
||||
};
|
||||
|
||||
@@ -385,7 +385,7 @@ module.exports = function configureMiddleware(app, {
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false,
|
||||
skip: (req) => isTest || req.path === '/health' || req.path === '/api/health' || req.path.startsWith('/probe/') || req.path.startsWith('/api/auth/gate/') || req.path === '/api/totp/check-session' || req.path.endsWith('/health-checks/status') || req.path.endsWith('/csrf-token') || req.path === '/api/v1/dns/logs',
|
||||
message: { success: false, error: 'Too many requests, please try again later' }
|
||||
message: { success: false, error: 'Too many requests, please try again later' },
|
||||
});
|
||||
|
||||
const strictLimiter = rateLimit({
|
||||
@@ -393,7 +393,7 @@ module.exports = function configureMiddleware(app, {
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false,
|
||||
skip: () => isTest,
|
||||
message: { success: false, error: 'Too many requests to this endpoint, please try again later' }
|
||||
message: { success: false, error: 'Too many requests to this endpoint, please try again later' },
|
||||
});
|
||||
|
||||
app.use(generalLimiter);
|
||||
@@ -407,7 +407,7 @@ module.exports = function configureMiddleware(app, {
|
||||
...RATE_LIMITS.TOTP,
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false,
|
||||
message: { success: false, error: 'Too many TOTP attempts, please try again later' }
|
||||
message: { success: false, error: 'Too many TOTP attempts, please try again later' },
|
||||
});
|
||||
app.use('/api/totp/verify', totpLimiter);
|
||||
app.use('/api/totp/verify-setup', totpLimiter);
|
||||
@@ -425,6 +425,6 @@ module.exports = function configureMiddleware(app, {
clearIPSession,
clearSessionCookie,
isSessionValid,
ipSessions
ipSessions,
};
};

877
dashcaddy-api/package-lock.json
generated
File diff suppressed because it is too large
@@ -7,7 +7,10 @@
"start": "node server.js",
"test": "jest",
"test:watch": "jest --watch",
"test:coverage": "jest --coverage"
"test:coverage": "jest --coverage",
"lint": "eslint .",
"lint:fix": "eslint . --fix",
"format": "prettier --write '**/*.{js,json,md}'"
},
"dependencies": {
"compression": "^1.8.1",
@@ -26,7 +29,9 @@
"validator": "^13.11.0"
},
"devDependencies": {
"eslint": "^8.57.1",
"jest": "^29.7.0",
"prettier": "^3.8.1",
"supertest": "^6.3.4"
}
}

@@ -51,12 +51,12 @@ const paths = {
|
||||
process.env.APPDATA || 'C:\\Users',
|
||||
'C:\\ProgramData',
|
||||
'/var/log',
|
||||
'/opt'
|
||||
'/opt',
|
||||
]
|
||||
: [
|
||||
'/var/log',
|
||||
'/opt',
|
||||
'/home'
|
||||
'/home',
|
||||
],
|
||||
|
||||
// Platform detection helpers
|
||||
|
||||
@@ -16,10 +16,10 @@ const LOCK_RETRY_OPTIONS = {
|
||||
retries: 10,
|
||||
minTimeout: 100,
|
||||
maxTimeout: 1000,
|
||||
randomize: true
|
||||
randomize: true,
|
||||
},
|
||||
stale: LOCK_STALE_THRESHOLD,
|
||||
realpath: false
|
||||
realpath: false,
|
||||
};
|
||||
|
||||
class PortLockManager {
|
||||
@@ -72,7 +72,7 @@ class PortLockManager {
|
||||
if (!fs.existsSync(lockFilePath)) {
|
||||
fs.writeFileSync(lockFilePath, JSON.stringify({
|
||||
created: new Date().toISOString(),
|
||||
port
|
||||
port,
|
||||
}));
|
||||
}
|
||||
|
||||
@@ -89,7 +89,7 @@ class PortLockManager {
|
||||
this.activeLocks.set(lockId, {
|
||||
ports: sortedPorts,
|
||||
releases: releaseFunctions,
|
||||
timestamp: Date.now()
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
|
||||
console.log(`[PortLockManager] Successfully acquired all locks (ID: ${lockId})`);
|
||||
@@ -97,13 +97,13 @@ class PortLockManager {
|
||||
|
||||
} catch (error) {
|
||||
// Release any locks we managed to acquire
|
||||
console.error(`[PortLockManager] Failed to acquire all locks:`, error.message);
|
||||
console.error('[PortLockManager] Failed to acquire all locks:', error.message);
|
||||
|
||||
for (const release of releaseFunctions) {
|
||||
try {
|
||||
await release();
|
||||
} catch (releaseError) {
|
||||
console.error(`[PortLockManager] Error releasing lock during cleanup:`, releaseError.message);
|
||||
console.error('[PortLockManager] Error releasing lock during cleanup:', releaseError.message);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -132,7 +132,7 @@ class PortLockManager {
|
||||
await release();
|
||||
} catch (error) {
|
||||
errors.push(error.message);
|
||||
console.error(`[PortLockManager] Error releasing lock:`, error.message);
|
||||
console.error('[PortLockManager] Error releasing lock:', error.message);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -198,13 +198,13 @@ class PortLockManager {
|
||||
lockId,
|
||||
ports: info.ports,
|
||||
age: Date.now() - info.timestamp,
|
||||
timestamp: new Date(info.timestamp).toISOString()
|
||||
timestamp: new Date(info.timestamp).toISOString(),
|
||||
}));
|
||||
|
||||
return {
|
||||
activeLocks: activeLocks.length,
|
||||
locks: activeLocks,
|
||||
lockDirectory: LOCK_DIR
|
||||
lockDirectory: LOCK_DIR,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -4,336 +4,336 @@
|
||||
const RECIPE_TEMPLATES = {
|
||||
|
||||
// === MEDIA & ENTERTAINMENT ===
|
||||
"htpc-suite": {
|
||||
name: "HTPC Suite",
|
||||
description: "Complete media automation: find, download, organize, and stream",
|
||||
icon: "\uD83C\uDFAC",
|
||||
category: "Media",
|
||||
type: "recipe",
|
||||
difficulty: "Intermediate",
|
||||
'htpc-suite': {
|
||||
name: 'HTPC Suite',
|
||||
description: 'Complete media automation: find, download, organize, and stream',
|
||||
icon: '\uD83C\uDFAC',
|
||||
category: 'Media',
|
||||
type: 'recipe',
|
||||
difficulty: 'Intermediate',
|
||||
popularity: 98,
|
||||
components: [
|
||||
{
|
||||
id: "prowlarr",
|
||||
role: "Indexer Manager",
|
||||
templateRef: "prowlarr",
|
||||
id: 'prowlarr',
|
||||
role: 'Indexer Manager',
|
||||
templateRef: 'prowlarr',
|
||||
required: true,
|
||||
order: 1
|
||||
order: 1,
|
||||
},
|
||||
{
|
||||
id: "qbittorrent",
|
||||
role: "Download Client",
|
||||
templateRef: "qbittorrent",
|
||||
id: 'qbittorrent',
|
||||
role: 'Download Client',
|
||||
templateRef: 'qbittorrent',
|
||||
required: true,
|
||||
order: 2
|
||||
order: 2,
|
||||
},
|
||||
{
|
||||
id: "sonarr",
|
||||
role: "TV Show Manager",
|
||||
templateRef: "sonarr",
|
||||
id: 'sonarr',
|
||||
role: 'TV Show Manager',
|
||||
templateRef: 'sonarr',
|
||||
required: true,
|
||||
order: 3
|
||||
order: 3,
|
||||
},
|
||||
{
|
||||
id: "radarr",
|
||||
role: "Movie Manager",
|
||||
templateRef: "radarr",
|
||||
id: 'radarr',
|
||||
role: 'Movie Manager',
|
||||
templateRef: 'radarr',
|
||||
required: true,
|
||||
order: 4
|
||||
order: 4,
|
||||
},
|
||||
{
|
||||
id: "lidarr",
|
||||
role: "Music Manager",
|
||||
templateRef: "lidarr",
|
||||
id: 'lidarr',
|
||||
role: 'Music Manager',
|
||||
templateRef: 'lidarr',
|
||||
required: false,
|
||||
order: 5
|
||||
order: 5,
|
||||
},
|
||||
{
|
||||
id: "overseerr",
|
||||
role: "Request Manager",
|
||||
templateRef: "seerr",
|
||||
id: 'overseerr',
|
||||
role: 'Request Manager',
|
||||
templateRef: 'seerr',
|
||||
required: false,
|
||||
order: 6
|
||||
}
|
||||
order: 6,
|
||||
},
|
||||
],
|
||||
sharedVolumes: {
|
||||
media: {
|
||||
label: "Media Library",
|
||||
description: "Root folder for all media (movies, TV, music)",
|
||||
defaultPath: "/media",
|
||||
usedBy: ["sonarr", "radarr", "lidarr", "qbittorrent"]
|
||||
label: 'Media Library',
|
||||
description: 'Root folder for all media (movies, TV, music)',
|
||||
defaultPath: '/media',
|
||||
usedBy: ['sonarr', 'radarr', 'lidarr', 'qbittorrent'],
|
||||
},
|
||||
downloads: {
|
||||
label: "Downloads",
|
||||
description: "Shared downloads folder for all download clients",
|
||||
defaultPath: "/downloads",
|
||||
usedBy: ["sonarr", "radarr", "lidarr", "qbittorrent"]
|
||||
}
|
||||
label: 'Downloads',
|
||||
description: 'Shared downloads folder for all download clients',
|
||||
defaultPath: '/downloads',
|
||||
usedBy: ['sonarr', 'radarr', 'lidarr', 'qbittorrent'],
|
||||
},
|
||||
},
|
||||
autoConnect: {
|
||||
enabled: true,
|
||||
description: "Automatically connects Sonarr/Radarr to Prowlarr and qBittorrent",
|
||||
description: 'Automatically connects Sonarr/Radarr to Prowlarr and qBittorrent',
|
||||
steps: [
|
||||
{ action: "configureProwlarrApps", targets: ["sonarr", "radarr", "lidarr"] },
|
||||
{ action: "configureDownloadClient", client: "qbittorrent", targets: ["sonarr", "radarr", "lidarr"] }
|
||||
]
|
||||
{ action: 'configureProwlarrApps', targets: ['sonarr', 'radarr', 'lidarr'] },
|
||||
{ action: 'configureDownloadClient', client: 'qbittorrent', targets: ['sonarr', 'radarr', 'lidarr'] },
|
||||
],
|
||||
},
|
||||
setupInstructions: [
|
||||
"All services share the same media and downloads folders",
|
||||
"Prowlarr is pre-connected to Sonarr, Radarr, and Lidarr",
|
||||
"Add indexers in Prowlarr \u2014 they sync automatically to all *arr apps",
|
||||
"Add your media library root folders in Sonarr and Radarr",
|
||||
"qBittorrent is pre-configured as the download client"
|
||||
]
|
||||
'All services share the same media and downloads folders',
|
||||
'Prowlarr is pre-connected to Sonarr, Radarr, and Lidarr',
|
||||
'Add indexers in Prowlarr \u2014 they sync automatically to all *arr apps',
|
||||
'Add your media library root folders in Sonarr and Radarr',
|
||||
'qBittorrent is pre-configured as the download client',
|
||||
],
|
||||
},
|
||||
|
||||
// === PRODUCTIVITY ===
|
||||
"nextcloud-complete": {
|
||||
name: "Nextcloud Complete",
|
||||
description: "Full productivity suite: cloud storage, office editing, and collaboration",
|
||||
icon: "\u2601\uFE0F",
|
||||
category: "Productivity",
|
||||
type: "recipe",
|
||||
difficulty: "Intermediate",
|
||||
'nextcloud-complete': {
|
||||
name: 'Nextcloud Complete',
|
||||
description: 'Full productivity suite: cloud storage, office editing, and collaboration',
|
||||
icon: '\u2601\uFE0F',
|
||||
category: 'Productivity',
|
||||
type: 'recipe',
|
||||
difficulty: 'Intermediate',
|
||||
popularity: 90,
|
||||
components: [
|
||||
{
|
||||
id: "nextcloud-db",
|
||||
role: "Database",
|
||||
id: 'nextcloud-db',
|
||||
role: 'Database',
|
||||
required: true,
|
||||
order: 0,
|
||||
docker: {
|
||||
image: "mariadb:11",
|
||||
image: 'mariadb:11',
|
||||
ports: [],
|
||||
volumes: ["/opt/nextcloud-db/data:/var/lib/mysql"],
|
||||
volumes: ['/opt/nextcloud-db/data:/var/lib/mysql'],
|
||||
environment: {
|
||||
"MYSQL_ROOT_PASSWORD": "{{GENERATED_PASSWORD}}",
|
||||
"MYSQL_DATABASE": "nextcloud",
|
||||
"MYSQL_USER": "nextcloud",
|
||||
"MYSQL_PASSWORD": "{{GENERATED_PASSWORD}}"
|
||||
}
|
||||
'MYSQL_ROOT_PASSWORD': '{{GENERATED_PASSWORD}}',
|
||||
'MYSQL_DATABASE': 'nextcloud',
|
||||
'MYSQL_USER': 'nextcloud',
|
||||
'MYSQL_PASSWORD': '{{GENERATED_PASSWORD}}',
|
||||
},
|
||||
internal: true
|
||||
},
|
||||
internal: true,
|
||||
},
|
||||
{
|
||||
id: "nextcloud-redis",
|
||||
role: "Cache",
|
||||
id: 'nextcloud-redis',
|
||||
role: 'Cache',
|
||||
required: true,
|
||||
order: 0,
|
||||
docker: {
|
||||
image: "redis:7-alpine",
|
||||
image: 'redis:7-alpine',
|
||||
ports: [],
|
||||
volumes: ["/opt/nextcloud-redis/data:/data"],
|
||||
environment: {}
|
||||
volumes: ['/opt/nextcloud-redis/data:/data'],
|
||||
environment: {},
|
||||
},
|
||||
internal: true
|
||||
internal: true,
|
||||
},
|
||||
{
|
||||
id: "nextcloud",
|
||||
role: "Cloud Platform",
|
||||
templateRef: "nextcloud",
|
||||
id: 'nextcloud',
|
||||
role: 'Cloud Platform',
|
||||
templateRef: 'nextcloud',
|
||||
required: true,
|
||||
order: 1,
|
||||
envOverrides: {
|
||||
"MYSQL_HOST": "dashcaddy-nextcloud-db",
|
||||
"MYSQL_DATABASE": "nextcloud",
|
||||
"MYSQL_USER": "nextcloud",
|
||||
"MYSQL_PASSWORD": "{{GENERATED_PASSWORD}}",
|
||||
"REDIS_HOST": "dashcaddy-nextcloud-redis"
|
||||
}
|
||||
'MYSQL_HOST': 'dashcaddy-nextcloud-db',
|
||||
'MYSQL_DATABASE': 'nextcloud',
|
||||
'MYSQL_USER': 'nextcloud',
|
||||
'MYSQL_PASSWORD': '{{GENERATED_PASSWORD}}',
|
||||
'REDIS_HOST': 'dashcaddy-nextcloud-redis',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "collabora",
|
||||
role: "Office Suite",
|
||||
id: 'collabora',
|
||||
role: 'Office Suite',
|
||||
required: false,
|
||||
order: 2,
|
||||
docker: {
|
||||
image: "collabora/code:latest",
|
||||
ports: ["{{PORT}}:9980"],
|
||||
image: 'collabora/code:latest',
|
||||
ports: ['{{PORT}}:9980'],
|
||||
volumes: [],
|
||||
environment: {
|
||||
"aliasgroup1": "https://{{NEXTCLOUD_DOMAIN}}",
|
||||
"extra_params": "--o:ssl.enable=false --o:ssl.termination=true"
|
||||
}
|
||||
'aliasgroup1': 'https://{{NEXTCLOUD_DOMAIN}}',
|
||||
'extra_params': '--o:ssl.enable=false --o:ssl.termination=true',
|
||||
},
|
||||
subdomain: "office",
|
||||
},
|
||||
subdomain: 'office',
|
||||
defaultPort: 9980,
|
||||
healthCheck: "/"
|
||||
}
|
||||
healthCheck: '/',
|
||||
},
|
||||
],
|
||||
network: {
|
||||
name: "dashcaddy-nextcloud",
|
||||
driver: "bridge"
|
||||
name: 'dashcaddy-nextcloud',
|
||||
driver: 'bridge',
|
||||
},
|
||||
sharedVolumes: {
|
||||
data: {
|
||||
label: "Cloud Storage",
|
||||
description: "Nextcloud data directory for user files",
|
||||
defaultPath: "/opt/nextcloud/data",
|
||||
usedBy: ["nextcloud"]
|
||||
}
|
||||
label: 'Cloud Storage',
|
||||
description: 'Nextcloud data directory for user files',
|
||||
defaultPath: '/opt/nextcloud/data',
|
||||
usedBy: ['nextcloud'],
|
||||
},
|
||||
},
|
||||
setupInstructions: [
|
||||
"Complete the Nextcloud initial setup wizard in the browser",
|
||||
"MariaDB and Redis are pre-configured and connected",
|
||||
"If Collabora is enabled, configure it in Nextcloud: Settings \u2192 Nextcloud Office",
|
||||
"Point Nextcloud Office to your Collabora URL (e.g., https://office.sami)",
|
||||
"Configure email, 2FA, and other settings in Nextcloud admin panel"
|
||||
]
|
||||
'Complete the Nextcloud initial setup wizard in the browser',
|
||||
'MariaDB and Redis are pre-configured and connected',
|
||||
'If Collabora is enabled, configure it in Nextcloud: Settings \u2192 Nextcloud Office',
|
||||
'Point Nextcloud Office to your Collabora URL (e.g., https://office.sami)',
|
||||
'Configure email, 2FA, and other settings in Nextcloud admin panel',
|
||||
],
|
||||
},
|
||||
|
||||
// === DEVELOPMENT ===
|
||||
"dev-environment": {
|
||||
name: "Dev Environment",
|
||||
description: "Self-hosted development workflow: Git, CI/CD, IDE, and database",
|
||||
icon: "\uD83D\uDCBB",
|
||||
category: "Development",
|
||||
type: "recipe",
|
||||
difficulty: "Advanced",
|
||||
'dev-environment': {
|
||||
name: 'Dev Environment',
|
||||
description: 'Self-hosted development workflow: Git, CI/CD, IDE, and database',
|
||||
icon: '\uD83D\uDCBB',
|
||||
category: 'Development',
|
||||
type: 'recipe',
|
||||
difficulty: 'Advanced',
|
||||
popularity: 82,
|
||||
components: [
|
||||
{
|
||||
id: "dev-postgres",
|
||||
role: "Database",
|
||||
id: 'dev-postgres',
|
||||
role: 'Database',
|
||||
required: true,
|
||||
order: 0,
|
||||
docker: {
|
||||
image: "postgres:16-alpine",
|
||||
image: 'postgres:16-alpine',
|
||||
ports: [],
|
||||
volumes: ["/opt/dev-postgres/data:/var/lib/postgresql/data"],
|
||||
volumes: ['/opt/dev-postgres/data:/var/lib/postgresql/data'],
|
||||
environment: {
|
||||
"POSTGRES_DB": "gitea",
|
||||
"POSTGRES_USER": "gitea",
|
||||
"POSTGRES_PASSWORD": "{{GENERATED_PASSWORD}}"
|
||||
}
|
||||
'POSTGRES_DB': 'gitea',
|
||||
'POSTGRES_USER': 'gitea',
|
||||
'POSTGRES_PASSWORD': '{{GENERATED_PASSWORD}}',
|
||||
},
|
||||
internal: true
|
||||
},
|
||||
internal: true,
|
||||
},
|
||||
{
|
||||
id: "gitea",
|
||||
role: "Git Server",
|
||||
templateRef: "gitea",
|
||||
id: 'gitea',
|
||||
role: 'Git Server',
|
||||
templateRef: 'gitea',
|
||||
required: true,
|
||||
order: 1,
|
||||
envOverrides: {
|
||||
"GITEA__database__DB_TYPE": "postgres",
|
||||
"GITEA__database__HOST": "dashcaddy-dev-postgres:5432",
|
||||
"GITEA__database__NAME": "gitea",
|
||||
"GITEA__database__USER": "gitea",
|
||||
"GITEA__database__PASSWD": "{{GENERATED_PASSWORD}}"
|
||||
}
|
||||
'GITEA__database__DB_TYPE': 'postgres',
|
||||
'GITEA__database__HOST': 'dashcaddy-dev-postgres:5432',
|
||||
'GITEA__database__NAME': 'gitea',
|
||||
'GITEA__database__USER': 'gitea',
|
||||
'GITEA__database__PASSWD': '{{GENERATED_PASSWORD}}',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "drone",
|
||||
role: "CI/CD Pipeline",
|
||||
templateRef: "drone",
|
||||
id: 'drone',
|
||||
role: 'CI/CD Pipeline',
|
||||
templateRef: 'drone',
|
||||
required: false,
|
||||
order: 2
|
||||
order: 2,
|
||||
},
|
||||
{
|
||||
id: "vscode-server",
|
||||
role: "Web IDE",
|
||||
templateRef: "vscode-server",
|
||||
id: 'vscode-server',
|
||||
role: 'Web IDE',
|
||||
templateRef: 'vscode-server',
|
||||
required: false,
|
||||
order: 3
|
||||
}
|
||||
order: 3,
|
||||
},
|
||||
],
|
||||
network: {
|
||||
name: "dashcaddy-dev",
|
||||
driver: "bridge"
|
||||
name: 'dashcaddy-dev',
|
||||
driver: 'bridge',
|
||||
},
|
||||
setupInstructions: [
|
||||
"Gitea is pre-configured with PostgreSQL database",
|
||||
"Complete the Gitea initial setup wizard in the browser",
|
||||
"If Drone CI is enabled, connect it to Gitea via OAuth application",
|
||||
"VS Code Server provides a full IDE in your browser",
|
||||
"All development services share a Docker network for inter-service communication"
|
||||
]
|
||||
'Gitea is pre-configured with PostgreSQL database',
|
||||
'Complete the Gitea initial setup wizard in the browser',
|
||||
'If Drone CI is enabled, connect it to Gitea via OAuth application',
|
||||
'VS Code Server provides a full IDE in your browser',
|
||||
'All development services share a Docker network for inter-service communication',
|
||||
],
|
||||
},
|
||||
|
||||
// === HOME AUTOMATION ===
|
||||
"smart-home": {
|
||||
name: "Smart Home Hub",
|
||||
description: "Home automation: control, automate, and monitor IoT devices",
|
||||
icon: "\uD83C\uDFE0",
|
||||
category: "Home Automation",
|
||||
type: "recipe",
|
||||
difficulty: "Intermediate",
|
||||
'smart-home': {
|
||||
name: 'Smart Home Hub',
|
||||
description: 'Home automation: control, automate, and monitor IoT devices',
|
||||
icon: '\uD83C\uDFE0',
|
||||
category: 'Home Automation',
|
||||
type: 'recipe',
|
||||
difficulty: 'Intermediate',
|
||||
popularity: 88,
|
||||
components: [
|
||||
{
|
||||
id: "mosquitto",
|
||||
role: "MQTT Broker",
|
||||
id: 'mosquitto',
|
||||
role: 'MQTT Broker',
|
||||
required: true,
|
||||
order: 0,
|
||||
docker: {
|
||||
image: "eclipse-mosquitto:2",
|
||||
ports: ["1883:1883", "9001:9001"],
|
||||
image: 'eclipse-mosquitto:2',
|
||||
ports: ['1883:1883', '9001:9001'],
|
||||
volumes: [
|
||||
"/opt/mosquitto/config:/mosquitto/config",
|
||||
"/opt/mosquitto/data:/mosquitto/data",
|
||||
"/opt/mosquitto/log:/mosquitto/log"
|
||||
'/opt/mosquitto/config:/mosquitto/config',
|
||||
'/opt/mosquitto/data:/mosquitto/data',
|
||||
'/opt/mosquitto/log:/mosquitto/log',
|
||||
],
|
||||
environment: {}
|
||||
environment: {},
|
||||
},
|
||||
subdomain: "mqtt",
|
||||
subdomain: 'mqtt',
|
||||
defaultPort: 1883,
|
||||
internal: false,
|
||||
setupNote: "MQTT broker for IoT device communication"
|
||||
setupNote: 'MQTT broker for IoT device communication',
|
||||
},
|
||||
{
|
||||
id: "homeassistant",
|
||||
role: "Automation Hub",
|
||||
templateRef: "homeassistant",
|
||||
id: 'homeassistant',
|
||||
role: 'Automation Hub',
|
||||
templateRef: 'homeassistant',
|
||||
required: true,
|
||||
order: 1
|
||||
order: 1,
|
||||
},
|
||||
{
|
||||
id: "nodered",
|
||||
role: "Flow Automation",
|
||||
templateRef: "nodered",
|
||||
id: 'nodered',
|
||||
role: 'Flow Automation',
|
||||
templateRef: 'nodered',
|
||||
required: true,
|
||||
order: 2
|
||||
order: 2,
|
||||
},
|
||||
{
|
||||
id: "zigbee2mqtt",
|
||||
role: "Zigbee Bridge",
|
||||
id: 'zigbee2mqtt',
|
||||
role: 'Zigbee Bridge',
|
||||
required: false,
|
||||
order: 3,
|
||||
docker: {
|
||||
image: "koenkk/zigbee2mqtt:latest",
|
||||
ports: ["{{PORT}}:8080"],
|
||||
volumes: ["/opt/zigbee2mqtt/data:/app/data"],
|
||||
image: 'koenkk/zigbee2mqtt:latest',
|
||||
ports: ['{{PORT}}:8080'],
|
||||
volumes: ['/opt/zigbee2mqtt/data:/app/data'],
|
||||
environment: {
|
||||
"TZ": "{{TIMEZONE}}"
|
||||
}
|
||||
'TZ': '{{TIMEZONE}}',
|
||||
},
|
||||
subdomain: "zigbee",
|
||||
},
|
||||
subdomain: 'zigbee',
|
||||
defaultPort: 8080,
|
||||
healthCheck: "/",
|
||||
note: "Requires a Zigbee USB adapter (e.g., Sonoff Zigbee 3.0 USB Dongle Plus)"
|
||||
}
|
||||
healthCheck: '/',
|
||||
note: 'Requires a Zigbee USB adapter (e.g., Sonoff Zigbee 3.0 USB Dongle Plus)',
|
||||
},
|
||||
],
|
||||
network: {
|
||||
name: "dashcaddy-smarthome",
|
||||
driver: "bridge"
|
||||
name: 'dashcaddy-smarthome',
|
||||
driver: 'bridge',
|
||||
},
|
||||
setupInstructions: [
|
||||
"Mosquitto MQTT broker is ready for IoT device connections on port 1883",
|
||||
"Complete the Home Assistant onboarding wizard in the browser",
|
||||
"Connect Home Assistant to MQTT: Settings \u2192 Integrations \u2192 MQTT",
|
||||
"Node-RED provides visual flow automation \u2014 connect it to MQTT for device control",
|
||||
"If Zigbee2MQTT is enabled, it requires a physical Zigbee USB adapter"
|
||||
]
|
||||
}
|
||||
'Mosquitto MQTT broker is ready for IoT device connections on port 1883',
|
||||
'Complete the Home Assistant onboarding wizard in the browser',
|
||||
'Connect Home Assistant to MQTT: Settings \u2192 Integrations \u2192 MQTT',
|
||||
'Node-RED provides visual flow automation \u2014 connect it to MQTT for device control',
|
||||
'If Zigbee2MQTT is enabled, it requires a physical Zigbee USB adapter',
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
// Recipe category metadata (separate from app categories)
|
||||
const RECIPE_CATEGORIES = {
|
||||
"Media": { icon: "\uD83C\uDFAC", color: "#e74c3c", description: "Media streaming and automation stacks" },
|
||||
"Productivity": { icon: "\u2601\uFE0F", color: "#3498db", description: "Cloud storage and office suites" },
|
||||
"Development": { icon: "\uD83D\uDCBB", color: "#9b59b6", description: "Self-hosted development environments" },
|
||||
"Home Automation": { icon: "\uD83C\uDFE0", color: "#27ae60", description: "IoT and smart home control" }
|
||||
'Media': { icon: '\uD83C\uDFAC', color: '#e74c3c', description: 'Media streaming and automation stacks' },
|
||||
'Productivity': { icon: '\u2601\uFE0F', color: '#3498db', description: 'Cloud storage and office suites' },
|
||||
'Development': { icon: '\uD83D\uDCBB', color: '#9b59b6', description: 'Self-hosted development environments' },
|
||||
'Home Automation': { icon: '\uD83C\uDFE0', color: '#27ae60', description: 'IoT and smart home control' },
|
||||
};
|
||||
|
||||
module.exports = { RECIPE_TEMPLATES, RECIPE_CATEGORIES };
|
||||
|
||||
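RECIPE_TEMPLATES and RECIPE_CATEGORIES are pure data; the deploy side is expected to walk each recipe's components in ascending order, resolve templateRef entries against the app template catalog, substitute the {{GENERATED_PASSWORD}}-style placeholders, and attach everything to the recipe's bridge network. A minimal sketch of that flow follows; deployComponent, resolveTemplate, and ensureNetwork are assumed helper names for illustration, not functions added by this commit.

    const crypto = require('crypto');
    const { RECIPE_TEMPLATES } = require('./recipe-templates'); // path assumed

    // Illustrative only: deploy one recipe's components in their declared order.
    async function deployRecipe(recipeId, ctx) {
      const recipe = RECIPE_TEMPLATES[recipeId];
      const secrets = { '{{GENERATED_PASSWORD}}': crypto.randomBytes(16).toString('hex') };

      await ctx.docker.ensureNetwork(recipe.network); // assumed helper; e.g. { name: 'dashcaddy-nextcloud', driver: 'bridge' }

      const ordered = [...recipe.components].sort((a, b) => a.order - b.order);
      for (const component of ordered) {
        if (!component.required) continue; // optional components would be user-selected
        const spec = component.templateRef
          ? resolveTemplate(component.templateRef, component.envOverrides) // assumed helper
          : component.docker;
        await deployComponent(spec, { network: recipe.network.name, secrets }); // assumed helper
      }
    }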
@@ -144,28 +144,28 @@ class ResourceMonitor extends EventEmitter {
|
||||
timestamp: new Date().toISOString(),
|
||||
cpu: {
|
||||
percent: Math.round(cpuPercent * 100) / 100,
|
||||
usage: stats.cpu_stats.cpu_usage.total_usage
|
||||
usage: stats.cpu_stats.cpu_usage.total_usage,
|
||||
},
|
||||
memory: {
|
||||
usage: memoryUsage,
|
||||
limit: memoryLimit,
|
||||
percent: Math.round(memoryPercent * 100) / 100,
|
||||
usageMB: Math.round(memoryUsage / 1024 / 1024),
|
||||
limitMB: Math.round(memoryLimit / 1024 / 1024)
|
||||
limitMB: Math.round(memoryLimit / 1024 / 1024),
|
||||
},
|
||||
network: {
|
||||
rxBytes: networkRx,
|
||||
txBytes: networkTx,
|
||||
rxMB: Math.round(networkRx / 1024 / 1024 * 100) / 100,
|
||||
txMB: Math.round(networkTx / 1024 / 1024 * 100) / 100
|
||||
txMB: Math.round(networkTx / 1024 / 1024 * 100) / 100,
|
||||
},
|
||||
disk: {
|
||||
readBytes: blockRead,
|
||||
writeBytes: blockWrite,
|
||||
readMB: Math.round(blockRead / 1024 / 1024 * 100) / 100,
|
||||
writeMB: Math.round(blockWrite / 1024 / 1024 * 100) / 100
|
||||
writeMB: Math.round(blockWrite / 1024 / 1024 * 100) / 100,
|
||||
},
|
||||
pids: stats.pids_stats?.current || 0
|
||||
pids: stats.pids_stats?.current || 0,
|
||||
});
|
||||
});
|
||||
});
|
||||
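For reference, the rounding in this hunk keeps two decimal places: multiply by 100, Math.round, divide by 100. A quick worked example with an invented byte count:

    // Invented value, shown only to illustrate the two-decimal rounding above.
    const rxBytes = 5500000;
    const rxMB = Math.round(rxBytes / 1024 / 1024 * 100) / 100;
    // 5500000 / 1048576 ≈ 5.2452, times 100 is 524.52, rounded to 525, divided by 100 gives 5.25
    console.log(rxMB); // 5.25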
@@ -178,7 +178,7 @@ class ResourceMonitor extends EventEmitter {
|
||||
if (!this.stats.has(containerId)) {
|
||||
this.stats.set(containerId, {
|
||||
name: containerName,
|
||||
history: []
|
||||
history: [],
|
||||
});
|
||||
}
|
||||
|
||||
@@ -189,7 +189,7 @@ class ResourceMonitor extends EventEmitter {
|
||||
// Keep only recent stats (based on retention policy)
|
||||
const cutoffTime = Date.now() - (STATS_RETENTION_HOURS * 60 * 60 * 1000);
|
||||
containerStats.history = containerStats.history.filter(s =>
|
||||
new Date(s.timestamp).getTime() > cutoffTime
|
||||
new Date(s.timestamp).getTime() > cutoffTime,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -216,7 +216,7 @@ class ResourceMonitor extends EventEmitter {
|
||||
severity: 'warning',
|
||||
message: `CPU usage ${stats.cpu.percent.toFixed(1)}% exceeds threshold ${alertConfig.cpuThreshold}%`,
|
||||
value: stats.cpu.percent,
|
||||
threshold: alertConfig.cpuThreshold
|
||||
threshold: alertConfig.cpuThreshold,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -227,7 +227,7 @@ class ResourceMonitor extends EventEmitter {
|
||||
severity: 'warning',
|
||||
message: `Memory usage ${stats.memory.percent.toFixed(1)}% exceeds threshold ${alertConfig.memoryThreshold}%`,
|
||||
value: stats.memory.percent,
|
||||
threshold: alertConfig.memoryThreshold
|
||||
threshold: alertConfig.memoryThreshold,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -240,7 +240,7 @@ class ResourceMonitor extends EventEmitter {
|
||||
severity: 'warning',
|
||||
message: `Disk I/O ${diskIO.toFixed(1)} MB/s exceeds threshold ${alertConfig.diskIOThreshold} MB/s`,
|
||||
value: diskIO,
|
||||
threshold: alertConfig.diskIOThreshold
|
||||
threshold: alertConfig.diskIOThreshold,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -254,7 +254,7 @@ class ResourceMonitor extends EventEmitter {
|
||||
timestamp: new Date().toISOString(),
|
||||
alerts,
|
||||
stats,
|
||||
config: alertConfig
|
||||
config: alertConfig,
|
||||
});
|
||||
|
||||
// Auto-restart if configured
|
||||
@@ -278,7 +278,7 @@ class ResourceMonitor extends EventEmitter {
|
||||
containerId,
|
||||
containerName,
|
||||
timestamp: new Date().toISOString(),
|
||||
reason: alerts
|
||||
reason: alerts,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error(`[ResourceMonitor] Failed to restart ${containerName}:`, error.message);
|
||||
@@ -306,7 +306,7 @@ class ResourceMonitor extends EventEmitter {
|
||||
|
||||
const cutoffTime = Date.now() - (hours * 60 * 60 * 1000);
|
||||
return containerStats.history.filter(s =>
|
||||
new Date(s.timestamp).getTime() > cutoffTime
|
||||
new Date(s.timestamp).getTime() > cutoffTime,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -325,16 +325,16 @@ class ResourceMonitor extends EventEmitter {
|
||||
current: cpuValues[cpuValues.length - 1],
|
||||
avg: cpuValues.reduce((a, b) => a + b, 0) / cpuValues.length,
|
||||
max: Math.max(...cpuValues),
|
||||
min: Math.min(...cpuValues)
|
||||
min: Math.min(...cpuValues),
|
||||
},
|
||||
memory: {
|
||||
current: memoryValues[memoryValues.length - 1],
|
||||
avg: memoryValues.reduce((a, b) => a + b, 0) / memoryValues.length,
|
||||
max: Math.max(...memoryValues),
|
||||
min: Math.min(...memoryValues)
|
||||
min: Math.min(...memoryValues),
|
||||
},
|
||||
dataPoints: history.length,
|
||||
timeRange: hours
|
||||
timeRange: hours,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -352,7 +352,7 @@ class ResourceMonitor extends EventEmitter {
|
||||
name: data.name,
|
||||
current,
|
||||
aggregated,
|
||||
alertConfig: this.alerts.get(containerId)
|
||||
alertConfig: this.alerts.get(containerId),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -370,7 +370,7 @@ class ResourceMonitor extends EventEmitter {
|
||||
diskIOThreshold: config.diskIOThreshold || null,
|
||||
cooldownMinutes: config.cooldownMinutes || 15,
|
||||
autoRestart: config.autoRestart || false,
|
||||
notificationChannels: config.notificationChannels || []
|
||||
notificationChannels: config.notificationChannels || [],
|
||||
});
|
||||
|
||||
this.saveAlertConfig();
|
||||
@@ -400,7 +400,7 @@ class ResourceMonitor extends EventEmitter {
|
||||
|
||||
for (const [containerId, data] of this.stats.entries()) {
|
||||
data.history = data.history.filter(s =>
|
||||
new Date(s.timestamp).getTime() > cutoffTime
|
||||
new Date(s.timestamp).getTime() > cutoffTime,
|
||||
);
|
||||
|
||||
// Remove container stats if no recent data
|
||||
@@ -471,7 +471,7 @@ class ResourceMonitor extends EventEmitter {
|
||||
return {
|
||||
stats: Object.fromEntries(this.stats),
|
||||
alerts: Object.fromEntries(this.alerts),
|
||||
exportedAt: new Date().toISOString()
|
||||
exportedAt: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
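Tying the alert hunks together, a caller registers per-container thresholds that the monitor later compares against stats.cpu.percent, stats.memory.percent, and the disk I/O rate. The setter name below (setAlertConfig) and the threshold values are assumptions for illustration; the diff only shows the normalization body that ends in saveAlertConfig().

    // Assumed entry point and example values; only the defaults (cooldown 15 min,
    // autoRestart false, empty notificationChannels) are taken from the hunk above.
    monitor.setAlertConfig(containerId, {
      cpuThreshold: 85,       // percent
      memoryThreshold: 90,    // percent
      diskIOThreshold: 50,    // MB/s
      cooldownMinutes: 15,
      autoRestart: false,
      notificationChannels: [],
    });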
@@ -62,7 +62,7 @@ module.exports = function(ctx, helpers) {
|
||||
ctx.log.info('deploy', 'DashCA: Using existing index.html');
|
||||
}
|
||||
|
||||
ctx.log.info('deploy', 'DashCA: For full features, copy certificate files to ' + destPath);
|
||||
ctx.log.info('deploy', `DashCA: For full features, copy certificate files to ${ destPath}`);
|
||||
ctx.log.info('deploy', 'DashCA: Static site deployment completed successfully');
|
||||
} catch (error) {
|
||||
ctx.log.error('deploy', 'DashCA deployment error', { error: error.message });
|
||||
@@ -121,14 +121,14 @@ module.exports = function(ctx, helpers) {
|
||||
PortBindings: {},
|
||||
Binds: translatedVolumes,
|
||||
RestartPolicy: { Name: 'unless-stopped' },
|
||||
LogConfig: DOCKER.LOG_CONFIG
|
||||
LogConfig: DOCKER.LOG_CONFIG,
|
||||
},
|
||||
Env: Object.entries(processedTemplate.docker.environment || {}).map(([k, v]) => `${k}=${v}`),
|
||||
Labels: {
|
||||
'sami.managed': 'true', 'sami.app': appId,
|
||||
'sami.subdomain': userConfig.subdomain,
|
||||
'sami.deployed': new Date().toISOString()
|
||||
}
|
||||
'sami.deployed': new Date().toISOString(),
|
||||
},
|
||||
};
|
||||
|
||||
processedTemplate.docker.ports.forEach(portMapping => {
|
||||
@@ -164,7 +164,7 @@ module.exports = function(ctx, helpers) {
|
||||
try {
|
||||
const pruneResult = await ctx.docker.client.pruneImages({ filters: { dangling: { true: true } } });
|
||||
if (pruneResult.SpaceReclaimed > 0) {
|
||||
ctx.log.info('docker', 'Pruned dangling images after deploy', { spaceReclaimed: Math.round(pruneResult.SpaceReclaimed / 1024 / 1024) + 'MB' });
|
||||
ctx.log.info('docker', 'Pruned dangling images after deploy', { spaceReclaimed: `${Math.round(pruneResult.SpaceReclaimed / 1024 / 1024) }MB` });
|
||||
}
|
||||
} catch (pruneErr) {
|
||||
ctx.log.debug('docker', 'Image prune after deploy failed', { error: pruneErr.message });
|
||||
@@ -324,7 +324,7 @@ module.exports = function(ctx, helpers) {
|
||||
tailscaleOnly: config.tailscaleOnly || false,
|
||||
allowedIPs: config.allowedIPs || [],
|
||||
customVolumes: config.customVolumes || undefined,
|
||||
useExisting: false
|
||||
useExisting: false,
|
||||
},
|
||||
container: template.isStaticSite ? null : {
|
||||
image: processedTemplate.docker.image,
|
||||
@@ -340,14 +340,14 @@ module.exports = function(ctx, helpers) {
|
||||
}
|
||||
return env;
|
||||
})(),
|
||||
capabilities: processedTemplate.docker.capabilities || undefined
|
||||
capabilities: processedTemplate.docker.capabilities || undefined,
|
||||
},
|
||||
caddy: {
|
||||
tailscaleOnly: config.tailscaleOnly || false,
|
||||
allowedIPs: config.allowedIPs || [],
|
||||
subpathSupport: template.subpathSupport || 'strip',
|
||||
routingMode: ctx.siteConfig.routingMode
|
||||
}
|
||||
routingMode: ctx.siteConfig.routingMode,
|
||||
},
|
||||
};
|
||||
|
||||
await ctx.addServiceToConfig({
|
||||
@@ -358,7 +358,7 @@ module.exports = function(ctx, helpers) {
|
||||
tailscaleOnly: config.tailscaleOnly || false,
|
||||
routingMode: ctx.siteConfig.routingMode,
|
||||
deployedAt: new Date().toISOString(),
|
||||
deploymentManifest
|
||||
deploymentManifest,
|
||||
});
|
||||
ctx.log.info('deploy', 'Service added to dashboard', { subdomain: config.subdomain });
|
||||
|
||||
@@ -366,7 +366,7 @@ module.exports = function(ctx, helpers) {
|
||||
success: true, containerId, usedExisting,
|
||||
url: serviceUrl,
|
||||
message: usedExisting ? `${template.name} configured using existing container!` : `${template.name} deployed successfully!`,
|
||||
setupInstructions: template.setupInstructions || []
|
||||
setupInstructions: template.setupInstructions || [],
|
||||
};
|
||||
if (dnsWarning) response.warning = dnsWarning;
|
||||
|
||||
|
||||
@@ -38,16 +38,16 @@ module.exports = function(ctx) {
|
||||
const templateImage = template.docker.image.split(':')[0];
|
||||
for (const container of containers) {
|
||||
const containerImage = container.Image.split(':')[0];
|
||||
if (containerImage === templateImage || containerImage.endsWith('/' + templateImage)) {
|
||||
if (containerImage === templateImage || containerImage.endsWith(`/${ templateImage}`)) {
|
||||
const ports = container.Ports.filter(p => p.PublicPort).map(p => ({
|
||||
hostPort: p.PublicPort, containerPort: p.PrivatePort, protocol: p.Type
|
||||
hostPort: p.PublicPort, containerPort: p.PrivatePort, protocol: p.Type,
|
||||
}));
|
||||
return {
|
||||
id: container.Id, shortId: container.Id.slice(0, 12),
|
||||
name: container.Names[0]?.replace(/^\//, '') || 'unknown',
|
||||
image: container.Image, status: container.Status, state: container.State,
|
||||
ports, primaryPort: ports.length > 0 ? ports[0].hostPort : null,
|
||||
labels: container.Labels || {}
|
||||
labels: container.Labels || {},
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -72,7 +72,7 @@ module.exports = function(ctx) {
|
||||
'{{PORT}}': config.port || template.defaultPort,
|
||||
'{{MEDIA_PATH}}': mediaPaths[0] || '/media',
|
||||
'{{TIMEZONE}}': ctx.siteConfig.timezone || 'UTC',
|
||||
'{{GENERATED_SECRET}}': crypto.randomBytes(32).toString('hex')
|
||||
'{{GENERATED_SECRET}}': crypto.randomBytes(32).toString('hex'),
|
||||
};
|
||||
|
||||
function replaceInObject(obj) {
|
||||
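As a concrete illustration of the substitution map above, a templated port or volume string would resolve like this (values invented; replaceInObject in the hunk presumably applies the same idea across every string in the docker template):

    // Invented values, shown only to illustrate the placeholder substitution.
    const replacements = {
      '{{PORT}}': '8096',
      '{{MEDIA_PATH}}': '/srv/media',
      '{{TIMEZONE}}': 'UTC',
    };
    const resolve = (str) =>
      Object.entries(replacements).reduce((s, [k, v]) => s.replaceAll(k, v), str);

    resolve('{{PORT}}:8096');         // '8096:8096'
    resolve('{{MEDIA_PATH}}:/media'); // '/srv/media:/media'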
@@ -117,7 +117,7 @@ module.exports = function(ctx) {
|
||||
const basePath = `/${config.subdomain}`;
|
||||
// Some apps need the full URL, not just the path
|
||||
if (['GF_SERVER_ROOT_URL', 'GITEA__server__ROOT_URL'].includes(template.urlBaseEnv)) {
|
||||
processed.docker.environment[template.urlBaseEnv] = ctx.buildServiceUrl(config.subdomain) + '/';
|
||||
processed.docker.environment[template.urlBaseEnv] = `${ctx.buildServiceUrl(config.subdomain) }/`;
|
||||
} else {
|
||||
processed.docker.environment[template.urlBaseEnv] = basePath;
|
||||
}
|
||||
@@ -137,7 +137,7 @@ module.exports = function(ctx) {
|
||||
config.mediaPath.split(',').map(p => p.trim()).filter(Boolean).forEach(p => allowedRoots.push(path.resolve(p)));
|
||||
}
|
||||
const isAllowed = allowedRoots.some(root =>
|
||||
normalizedHost === root || normalizedHost.startsWith(root + path.sep)
|
||||
normalizedHost === root || normalizedHost.startsWith(root + path.sep),
|
||||
);
|
||||
if (!isAllowed) {
|
||||
ctx.log.warn('deploy', 'Custom volume host path rejected', { hostPath: override.hostPath, allowed: allowedRoots });
|
||||
@@ -162,76 +162,76 @@ module.exports = function(ctx) {
|
||||
c += ` root * ${sitePath}\n\n`;
|
||||
|
||||
if (tailscaleOnly) {
|
||||
c += ` @blocked not remote_ip 100.64.0.0/10\n`;
|
||||
c += ` respond @blocked "Access denied. Tailscale connection required." 403\n\n`;
|
||||
c += ' @blocked not remote_ip 100.64.0.0/10\n';
|
||||
c += ' respond @blocked "Access denied. Tailscale connection required." 403\n\n';
|
||||
}
|
||||
|
||||
if (apiProxy) {
|
||||
c += ` handle /api/* {\n`;
|
||||
c += ' handle /api/* {\n';
|
||||
c += ` reverse_proxy ${apiProxy}\n`;
|
||||
c += ` }\n\n`;
|
||||
c += ' }\n\n';
|
||||
}
|
||||
|
||||
c += ` @crt path *.crt\n`;
|
||||
c += ` handle @crt {\n`;
|
||||
c += ` header Content-Type application/x-x509-ca-cert\n`;
|
||||
c += ` header Content-Disposition "attachment; filename=\\"{file}\\""\n`;
|
||||
c += ` header Cache-Control "public, max-age=86400"\n`;
|
||||
c += ` file_server\n`;
|
||||
c += ` }\n\n`;
|
||||
c += ` @der path *.der\n`;
|
||||
c += ` handle @der {\n`;
|
||||
c += ` header Content-Type application/x-x509-ca-cert\n`;
|
||||
c += ` header Content-Disposition "attachment; filename=\\"{file}\\""\n`;
|
||||
c += ` header Cache-Control "public, max-age=86400"\n`;
|
||||
c += ` file_server\n`;
|
||||
c += ` }\n\n`;
|
||||
c += ` @mobileconfig path *.mobileconfig\n`;
|
||||
c += ` handle @mobileconfig {\n`;
|
||||
c += ` header Content-Type application/x-apple-aspen-config\n`;
|
||||
c += ` header Content-Disposition "attachment; filename=\\"{file}\\""\n`;
|
||||
c += ` header Cache-Control "public, max-age=86400"\n`;
|
||||
c += ` file_server\n`;
|
||||
c += ` }\n\n`;
|
||||
c += ` @ps1 path *.ps1\n`;
|
||||
c += ` handle @ps1 {\n`;
|
||||
c += ` header Content-Type text/plain\n`;
|
||||
c += ` header Content-Disposition "attachment; filename=\\"{file}\\""\n`;
|
||||
c += ` file_server\n`;
|
||||
c += ` }\n\n`;
|
||||
c += ` @sh path *.sh\n`;
|
||||
c += ` handle @sh {\n`;
|
||||
c += ` header Content-Type text/x-shellscript\n`;
|
||||
c += ` header Content-Disposition "attachment; filename=\\"{file}\\""\n`;
|
||||
c += ` file_server\n`;
|
||||
c += ` }\n\n`;
|
||||
c += ` # Static site with SPA fallback\n`;
|
||||
c += ` handle {\n`;
|
||||
c += ` @notFile not file {path}\n`;
|
||||
c += ` rewrite @notFile /index.html\n`;
|
||||
c += ` file_server\n`;
|
||||
c += ` }\n\n`;
|
||||
c += ` # No cache for HTML\n`;
|
||||
c += ` @htmlfiles {\n`;
|
||||
c += ` path *.html\n`;
|
||||
c += ` path /\n`;
|
||||
c += ` }\n`;
|
||||
c += ` header @htmlfiles Cache-Control "no-store"\n`;
|
||||
c += ' @crt path *.crt\n';
|
||||
c += ' handle @crt {\n';
|
||||
c += ' header Content-Type application/x-x509-ca-cert\n';
|
||||
c += ' header Content-Disposition "attachment; filename=\\"{file}\\""\n';
|
||||
c += ' header Cache-Control "public, max-age=86400"\n';
|
||||
c += ' file_server\n';
|
||||
c += ' }\n\n';
|
||||
c += ' @der path *.der\n';
|
||||
c += ' handle @der {\n';
|
||||
c += ' header Content-Type application/x-x509-ca-cert\n';
|
||||
c += ' header Content-Disposition "attachment; filename=\\"{file}\\""\n';
|
||||
c += ' header Cache-Control "public, max-age=86400"\n';
|
||||
c += ' file_server\n';
|
||||
c += ' }\n\n';
|
||||
c += ' @mobileconfig path *.mobileconfig\n';
|
||||
c += ' handle @mobileconfig {\n';
|
||||
c += ' header Content-Type application/x-apple-aspen-config\n';
|
||||
c += ' header Content-Disposition "attachment; filename=\\"{file}\\""\n';
|
||||
c += ' header Cache-Control "public, max-age=86400"\n';
|
||||
c += ' file_server\n';
|
||||
c += ' }\n\n';
|
||||
c += ' @ps1 path *.ps1\n';
|
||||
c += ' handle @ps1 {\n';
|
||||
c += ' header Content-Type text/plain\n';
|
||||
c += ' header Content-Disposition "attachment; filename=\\"{file}\\""\n';
|
||||
c += ' file_server\n';
|
||||
c += ' }\n\n';
|
||||
c += ' @sh path *.sh\n';
|
||||
c += ' handle @sh {\n';
|
||||
c += ' header Content-Type text/x-shellscript\n';
|
||||
c += ' header Content-Disposition "attachment; filename=\\"{file}\\""\n';
|
||||
c += ' file_server\n';
|
||||
c += ' }\n\n';
|
||||
c += ' # Static site with SPA fallback\n';
|
||||
c += ' handle {\n';
|
||||
c += ' @notFile not file {path}\n';
|
||||
c += ' rewrite @notFile /index.html\n';
|
||||
c += ' file_server\n';
|
||||
c += ' }\n\n';
|
||||
c += ' # No cache for HTML\n';
|
||||
c += ' @htmlfiles {\n';
|
||||
c += ' path *.html\n';
|
||||
c += ' path /\n';
|
||||
c += ' }\n';
|
||||
c += ' header @htmlfiles Cache-Control "no-store"\n';
|
||||
return c;
|
||||
}
|
||||
|
||||
// HTTPS block
|
||||
let config = `${domain} {\n`;
|
||||
config += ` tls internal\n\n`;
|
||||
config += ' tls internal\n\n';
|
||||
config += siteBlockContent();
|
||||
config += `}`;
|
||||
config += '}';
|
||||
|
||||
// HTTP companion block for devices that haven't trusted the CA yet
|
||||
if (httpAccess) {
|
||||
config += `\n\n# HTTP access for first-time certificate installation\n`;
|
||||
config += '\n\n# HTTP access for first-time certificate installation\n';
|
||||
config += `http://${domain} {\n`;
|
||||
config += siteBlockContent();
|
||||
config += `}`;
|
||||
config += '}';
|
||||
}
|
||||
|
||||
return config;
|
||||
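For a hypothetical domain and site root (say dash.sami served from /opt/sites/dash, no API proxy, no Tailscale restriction, HTTP companion block omitted), the generator in this hunk emits a Caddyfile block along these lines; only the .crt handler is shown, the .der/.mobileconfig/.ps1/.sh handlers follow the same pattern, and exact indentation may differ:

    dash.sami {
      tls internal

      root * /opt/sites/dash

      @crt path *.crt
      handle @crt {
        header Content-Type application/x-x509-ca-cert
        header Content-Disposition "attachment; filename=\"{file}\""
        header Cache-Control "public, max-age=86400"
        file_server
      }

      # Static site with SPA fallback
      handle {
        @notFile not file {path}
        rewrite @notFile /index.html
        file_server
      }

      # No cache for HTML
      @htmlfiles {
        path *.html
        path /
      }
      header @htmlfiles Cache-Control "no-store"
    }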
@@ -254,7 +254,7 @@ module.exports = function(ctx) {
|
||||
} else if (healthPath && port && httpCheckFailed < 5) {
|
||||
try {
|
||||
const response = await ctx.fetchT(`http://localhost:${port}${healthPath}`, {
|
||||
signal: AbortSignal.timeout(3000), redirect: 'manual'
|
||||
signal: AbortSignal.timeout(3000), redirect: 'manual',
|
||||
});
|
||||
if (response.ok || (response.status >= 300 && response.status < 400)) {
|
||||
ctx.log.info('docker', 'Health check passed', { containerId, status: response.status });
|
||||
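The health check in this hunk uses fetch with an AbortSignal timeout and manual redirects, and treats a 3xx as healthy because many apps bounce unauthenticated requests to a login page. The same pattern as a standalone sketch (names invented; Node 18+ assumed for global fetch and AbortSignal.timeout):

    // Illustrative probe only: 2xx and 3xx count as "up", anything else
    // (or a timeout / connection error) counts as "down".
    async function probe(port, healthPath = '/') {
      try {
        const res = await fetch(`http://localhost:${port}${healthPath}`, {
          signal: AbortSignal.timeout(3000),
          redirect: 'manual',
        });
        return res.ok || (res.status >= 300 && res.status < 400);
      } catch {
        return false;
      }
    }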
@@ -290,7 +290,7 @@ module.exports = function(ctx) {
|
||||
await ctx.caddy.reload(existing);
|
||||
return;
|
||||
}
|
||||
const result = await ctx.caddy.modify(c => c + `\n${config}\n`);
|
||||
const result = await ctx.caddy.modify(c => `${c }\n${config}\n`);
|
||||
if (!result.success) throw new Error(`[DC-303] Failed to add Caddy config for ${domain}: ${result.error}`);
|
||||
await ctx.caddy.verifySite(domain);
|
||||
}
|
||||
@@ -405,6 +405,6 @@ module.exports = function(ctx) {
|
||||
removeSubpathConfig,
|
||||
ensureMainDomainBlock,
|
||||
RESERVED_SUBPATHS,
|
||||
generateStaticSiteConfig
|
||||
generateStaticSiteConfig,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -26,7 +26,7 @@ module.exports = function(ctx, helpers) {
|
||||
try {
|
||||
const pruneResult = await ctx.docker.client.pruneImages({ filters: { dangling: { true: true } } });
|
||||
if (pruneResult.SpaceReclaimed > 0) {
|
||||
ctx.log.info('docker', 'Pruned dangling images after removal', { spaceReclaimed: Math.round(pruneResult.SpaceReclaimed / 1024 / 1024) + 'MB' });
|
||||
ctx.log.info('docker', 'Pruned dangling images after removal', { spaceReclaimed: `${Math.round(pruneResult.SpaceReclaimed / 1024 / 1024) }MB` });
|
||||
}
|
||||
} catch (pruneErr) {
|
||||
ctx.log.debug('docker', 'Image prune after removal failed', { error: pruneErr.message });
|
||||
@@ -42,7 +42,7 @@ module.exports = function(ctx, helpers) {
|
||||
try {
|
||||
const domain = ctx.buildDomain(subdomain);
|
||||
const getResult = await ctx.dns.call(ctx.siteConfig.dnsServerIp, '/api/zones/records/get', {
|
||||
token: ctx.dns.getToken(), domain, zone: ctx.siteConfig.tld.replace(/^\./, ''), listZone: 'true'
|
||||
token: ctx.dns.getToken(), domain, zone: ctx.siteConfig.tld.replace(/^\./, ''), listZone: 'true',
|
||||
});
|
||||
let recordIp = ip || 'localhost';
|
||||
if (getResult.status === 'ok' && getResult.response?.records) {
|
||||
@@ -50,7 +50,7 @@ module.exports = function(ctx, helpers) {
|
||||
if (aRecord && aRecord.rData?.ipAddress) recordIp = aRecord.rData.ipAddress;
|
||||
}
|
||||
const dnsResult = await ctx.dns.call(ctx.siteConfig.dnsServerIp, '/api/zones/records/delete', {
|
||||
token: ctx.dns.getToken(), domain, type: 'A', ipAddress: recordIp
|
||||
token: ctx.dns.getToken(), domain, type: 'A', ipAddress: recordIp,
|
||||
});
|
||||
results.dns = dnsResult.status === 'ok' ? 'deleted' : (dnsResult.errorMessage || 'failed');
|
||||
ctx.log.info('dns', 'DNS record removal', { result: results.dns });
|
||||
|
||||
@@ -37,7 +37,7 @@ module.exports = function(ctx, helpers) {
|
||||
return res.json({
|
||||
success: true,
|
||||
message: 'No services have deployment manifests to restore',
|
||||
results: []
|
||||
results: [],
|
||||
});
|
||||
}
|
||||
|
||||
@@ -51,7 +51,7 @@ module.exports = function(ctx, helpers) {
|
||||
id: service.id,
|
||||
name: service.name,
|
||||
status: 'failed',
|
||||
error: error.message
|
||||
error: error.message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -63,7 +63,7 @@ module.exports = function(ctx, helpers) {
|
||||
res.json({
|
||||
success: true,
|
||||
message: `Restore complete: ${succeeded} restored, ${skipped} skipped, ${failed} failed`,
|
||||
results
|
||||
results,
|
||||
});
|
||||
}, 'apps-restore-all'));
|
||||
|
||||
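Put together, the restore-all handler replies with one entry per service; a response could look roughly like this (ids, names, and counts invented for illustration):

    // Invented example of the response body assembled above.
    const exampleResponse = {
      success: true,
      message: 'Restore complete: 1 restored, 1 skipped, 1 failed',
      results: [
        { id: 'jellyfin', name: 'Jellyfin', status: 'restored', type: 'container',
          containerId: 'abc123def456', message: 'Jellyfin restored successfully' },
        { id: 'grafana', name: 'Grafana', status: 'skipped',
          message: 'Container already running' },
        { id: 'uptime-kuma', name: 'Uptime Kuma', status: 'failed',
          error: 'No container configuration in manifest' },
      ],
    };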
@@ -81,7 +81,7 @@ module.exports = function(ctx, helpers) {
|
||||
hasManifest: !!service.deploymentManifest,
|
||||
templateId: service.deploymentManifest?.templateId || service.appTemplate || null,
|
||||
deployedAt: service.deployedAt || null,
|
||||
containerRunning: false
|
||||
containerRunning: false,
|
||||
};
|
||||
|
||||
// Check if container is currently running
|
||||
@@ -125,7 +125,7 @@ module.exports = function(ctx, helpers) {
|
||||
name: service.name,
|
||||
status: 'restored',
|
||||
type: 'static',
|
||||
message: `Static site "${service.name}" config preserved`
|
||||
message: `Static site "${service.name}" config preserved`,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -140,7 +140,7 @@ module.exports = function(ctx, helpers) {
|
||||
id: service.id,
|
||||
name: service.name,
|
||||
status: 'skipped',
|
||||
message: 'Container already running'
|
||||
message: 'Container already running',
|
||||
};
|
||||
}
|
||||
} catch (e) {
|
||||
@@ -164,7 +164,7 @@ module.exports = function(ctx, helpers) {
|
||||
id: service.id,
|
||||
name: service.name,
|
||||
status: 'skipped',
|
||||
message: 'Container already running (found by name)'
|
||||
message: 'Container already running (found by name)',
|
||||
};
|
||||
}
|
||||
// Exists but not running — remove stale container
|
||||
@@ -178,7 +178,7 @@ module.exports = function(ctx, helpers) {
|
||||
id: service.id,
|
||||
name: service.name,
|
||||
status: 'failed',
|
||||
error: 'No container configuration in manifest'
|
||||
error: 'No container configuration in manifest',
|
||||
};
|
||||
}
|
||||
|
||||
@@ -189,7 +189,7 @@ module.exports = function(ctx, helpers) {
|
||||
} catch (e) {
|
||||
// Check if image exists locally
|
||||
const images = await ctx.docker.client.listImages({
|
||||
filters: { reference: [manifest.container.image] }
|
||||
filters: { reference: [manifest.container.image] },
|
||||
});
|
||||
if (images.length === 0) {
|
||||
throw new Error(`Failed to pull image ${manifest.container.image}: ${e.message}`);
|
||||
@@ -206,7 +206,7 @@ module.exports = function(ctx, helpers) {
|
||||
PortBindings: {},
|
||||
Binds: manifest.container.volumes || [],
|
||||
RestartPolicy: { Name: 'unless-stopped' },
|
||||
LogConfig: DOCKER.LOG_CONFIG
|
||||
LogConfig: DOCKER.LOG_CONFIG,
|
||||
},
|
||||
Env: Object.entries(manifest.container.environment || {}).map(([k, v]) => `${k}=${v}`),
|
||||
Labels: {
|
||||
@@ -214,8 +214,8 @@ module.exports = function(ctx, helpers) {
|
||||
'sami.app': manifest.templateId,
|
||||
'sami.subdomain': manifest.config.subdomain,
|
||||
'sami.deployed': new Date().toISOString(),
|
||||
'sami.restored': 'true'
|
||||
}
|
||||
'sami.restored': 'true',
|
||||
},
|
||||
};
|
||||
|
||||
// Set up port bindings
|
||||
@@ -287,7 +287,7 @@ module.exports = function(ctx, helpers) {
|
||||
status: 'restored',
|
||||
type: 'container',
|
||||
containerId: container.id,
|
||||
message: `${service.name} restored successfully`
|
||||
message: `${service.name} restored successfully`,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ module.exports = function(ctx, helpers) {
|
||||
success: true,
|
||||
templates: ctx.APP_TEMPLATES,
|
||||
categories: ctx.TEMPLATE_CATEGORIES,
|
||||
difficultyLevels: ctx.DIFFICULTY_LEVELS
|
||||
difficultyLevels: ctx.DIFFICULTY_LEVELS,
|
||||
});
|
||||
}, 'apps-templates'));
|
||||
|
||||
@@ -71,7 +71,7 @@ module.exports = function(ctx, helpers) {
|
||||
try {
|
||||
const oldDomain = oldSubdomain.includes('.') ? oldSubdomain : ctx.buildDomain(oldSubdomain);
|
||||
const result = await ctx.dns.call(ctx.siteConfig.dnsServerIp, '/api/zones/records/delete', {
|
||||
token: ctx.dns.getToken(), domain: oldDomain, type: 'A', ipAddress: ip || 'localhost'
|
||||
token: ctx.dns.getToken(), domain: oldDomain, type: 'A', ipAddress: ip || 'localhost',
|
||||
});
|
||||
results.oldDns = result.status === 'ok' ? 'deleted' : result.errorMessage;
|
||||
ctx.log.info('dns', 'Old DNS record deleted', { domain: oldDomain });
|
||||
@@ -139,7 +139,7 @@ module.exports = function(ctx, helpers) {
|
||||
success: true,
|
||||
message: `Subdomain updated: ${oldSubdomain} -> ${newSubdomain}`,
|
||||
newUrl: `https://${ctx.buildDomain(newSubdomain)}`,
|
||||
results
|
||||
results,
|
||||
});
|
||||
}, 'update-subdomain'));
|
||||
|
||||
|
||||
@@ -11,12 +11,12 @@ module.exports = function(ctx, helpers) {
|
||||
const results = { radarr: null, sonarr: null };
|
||||
|
||||
// Step 1: Authenticate with Overseerr via Plex token
|
||||
let overseerrUrl = `http://host.docker.internal:${APP_PORTS.overseerr}`;
|
||||
const overseerrUrl = `http://host.docker.internal:${APP_PORTS.overseerr}`;
|
||||
const overseerrSession = await helpers.getOverseerrSession();
|
||||
|
||||
if (!overseerrSession) {
|
||||
return ctx.errorResponse(res, 502, 'Could not authenticate with Overseerr. Make sure Plex and Overseerr are running.', {
|
||||
hint: 'Complete Overseerr setup wizard and link your Plex account first, then try again.'
|
||||
hint: 'Complete Overseerr setup wizard and link your Plex account first, then try again.',
|
||||
});
|
||||
}
|
||||
|
||||
@@ -30,8 +30,8 @@ module.exports = function(ctx, helpers) {
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Cookie': overseerrSession.cookie,
|
||||
...options.headers
|
||||
}
|
||||
...options.headers,
|
||||
},
|
||||
});
|
||||
return response;
|
||||
};
|
||||
@@ -41,12 +41,12 @@ module.exports = function(ctx, helpers) {
|
||||
const statusRes = await overseerrFetch('/api/v1/status');
|
||||
if (!statusRes.ok) {
|
||||
return ctx.errorResponse(res, 502, 'Cannot connect to Overseerr', {
|
||||
hint: 'Make sure Overseerr is running on port 5055'
|
||||
hint: 'Make sure Overseerr is running on port 5055',
|
||||
});
|
||||
}
|
||||
} catch (e) {
|
||||
return ctx.errorResponse(res, 502, `Cannot reach Overseerr: ${e.message}`, {
|
||||
hint: 'Check if Overseerr container is running'
|
||||
hint: 'Check if Overseerr container is running',
|
||||
});
|
||||
}
|
||||
|
||||
@@ -59,14 +59,14 @@ module.exports = function(ctx, helpers) {
|
||||
|
||||
// Fetch quality profiles from Radarr
|
||||
const profilesRes = await ctx.fetchT(`${radarrBaseUrl}/api/v3/qualityprofile`, {
|
||||
headers: { 'X-Api-Key': radarr.apiKey }
|
||||
headers: { 'X-Api-Key': radarr.apiKey },
|
||||
});
|
||||
const profiles = profilesRes.ok ? await profilesRes.json() : [];
|
||||
const defaultProfile = profiles[0] || { id: 1, name: 'Any' };
|
||||
|
||||
// Fetch root folders from Radarr
|
||||
const rootFoldersRes = await ctx.fetchT(`${radarrBaseUrl}/api/v3/rootfolder`, {
|
||||
headers: { 'X-Api-Key': radarr.apiKey }
|
||||
headers: { 'X-Api-Key': radarr.apiKey },
|
||||
});
|
||||
const rootFolders = rootFoldersRes.ok ? await rootFoldersRes.json() : [];
|
||||
const defaultRootFolder = rootFolders[0]?.path || '/movies';
|
||||
@@ -87,12 +87,12 @@ module.exports = function(ctx, helpers) {
|
||||
minimumAvailability: 'released',
|
||||
isDefault: true,
|
||||
externalUrl: radarr.url,
|
||||
tags: []
|
||||
tags: [],
|
||||
};
|
||||
|
||||
const radarrRes = await overseerrFetch('/api/v1/settings/radarr', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(radarrConfig)
|
||||
body: JSON.stringify(radarrConfig),
|
||||
});
|
||||
|
||||
if (radarrRes.ok) {
|
||||
@@ -115,14 +115,14 @@ module.exports = function(ctx, helpers) {
|
||||
|
||||
// Fetch quality profiles from Sonarr
|
||||
const profilesRes = await ctx.fetchT(`${sonarrBaseUrl}/api/v3/qualityprofile`, {
|
||||
headers: { 'X-Api-Key': sonarr.apiKey }
|
||||
headers: { 'X-Api-Key': sonarr.apiKey },
|
||||
});
|
||||
const profiles = profilesRes.ok ? await profilesRes.json() : [];
|
||||
const defaultProfile = profiles[0] || { id: 1, name: 'Any' };
|
||||
|
||||
// Fetch root folders from Sonarr
|
||||
const rootFoldersRes = await ctx.fetchT(`${sonarrBaseUrl}/api/v3/rootfolder`, {
|
||||
headers: { 'X-Api-Key': sonarr.apiKey }
|
||||
headers: { 'X-Api-Key': sonarr.apiKey },
|
||||
});
|
||||
const rootFolders = rootFoldersRes.ok ? await rootFoldersRes.json() : [];
|
||||
const defaultRootFolder = rootFolders[0]?.path || '/tv';
|
||||
@@ -131,7 +131,7 @@ module.exports = function(ctx, helpers) {
|
||||
let languageProfileId = 1;
|
||||
try {
|
||||
const langRes = await ctx.fetchT(`${sonarrBaseUrl}/api/v3/languageprofile`, {
|
||||
headers: { 'X-Api-Key': sonarr.apiKey }
|
||||
headers: { 'X-Api-Key': sonarr.apiKey },
|
||||
});
|
||||
if (langRes.ok) {
|
||||
const langProfiles = await langRes.json();
|
||||
@@ -158,12 +158,12 @@ module.exports = function(ctx, helpers) {
|
||||
isDefault: true,
|
||||
enableSeasonFolders: true,
|
||||
externalUrl: sonarr.url,
|
||||
tags: []
|
||||
tags: [],
|
||||
};
|
||||
|
||||
const sonarrRes = await overseerrFetch('/api/v1/settings/sonarr', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(sonarrConfig)
|
||||
body: JSON.stringify(sonarrConfig),
|
||||
});
|
||||
|
||||
if (sonarrRes.ok) {
|
||||
@@ -182,7 +182,7 @@ module.exports = function(ctx, helpers) {
|
||||
res.json({
|
||||
success: anyConfigured,
|
||||
message: anyConfigured ? 'Services configured in Overseerr' : 'Configuration failed',
|
||||
results
|
||||
results,
|
||||
});
|
||||
}, 'arr-configure-overseerr'));
|
||||
|
||||
@@ -210,7 +210,7 @@ module.exports = function(ctx, helpers) {
|
||||
}
|
||||
|
||||
// Normalize URL - remove trailing slash
|
||||
let baseUrl = url.replace(/\/+$/, '');
|
||||
const baseUrl = url.replace(/\/+$/, '');
|
||||
|
||||
// Build the API endpoint
|
||||
let apiEndpoint;
|
||||
@@ -233,7 +233,7 @@ module.exports = function(ctx, helpers) {
|
||||
const response = await ctx.fetchT(apiEndpoint, {
|
||||
method: 'GET',
|
||||
headers,
|
||||
signal: AbortSignal.timeout(10000)
|
||||
signal: AbortSignal.timeout(10000),
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
@@ -244,7 +244,7 @@ module.exports = function(ctx, helpers) {
|
||||
return res.json({
|
||||
success: true,
|
||||
version,
|
||||
appName
|
||||
appName,
|
||||
});
|
||||
} else if (response.status === 401) {
|
||||
return ctx.errorResponse(res, 401, 'Invalid API key');
|
||||
@@ -288,7 +288,7 @@ module.exports = function(ctx, helpers) {
|
||||
containerName: container.Names[0]?.replace(/^\//, ''),
|
||||
port: exposedPort,
|
||||
url: `http://host.docker.internal:${exposedPort}`,
|
||||
localUrl: `http://localhost:${exposedPort}`
|
||||
localUrl: `http://localhost:${exposedPort}`,
|
||||
};
|
||||
|
||||
// Extract API key for arr services
|
||||
@@ -305,7 +305,7 @@ module.exports = function(ctx, helpers) {
|
||||
radarrFound: !!detected.radarr?.apiKey,
|
||||
sonarrFound: !!detected.sonarr?.apiKey,
|
||||
lidarrFound: !!detected.lidarr?.apiKey,
|
||||
prowlarrFound: !!detected.prowlarr?.apiKey
|
||||
prowlarrFound: !!detected.prowlarr?.apiKey,
|
||||
};
|
||||
|
||||
ctx.log.info('arr', 'Detected services', summary);
|
||||
@@ -313,14 +313,14 @@ module.exports = function(ctx, helpers) {
|
||||
if (!summary.overseerrFound) {
|
||||
return ctx.errorResponse(res, 400, 'Overseerr is not running. Deploy it first.', {
|
||||
detected,
|
||||
summary
|
||||
summary,
|
||||
});
|
||||
}
|
||||
|
||||
if (!summary.radarrFound && !summary.sonarrFound) {
|
||||
return ctx.errorResponse(res, 400, 'No Radarr or Sonarr found with valid API keys. Deploy at least one first.', {
|
||||
detected,
|
||||
summary
|
||||
summary,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -331,7 +331,7 @@ module.exports = function(ctx, helpers) {
|
||||
return ctx.errorResponse(res, 502, 'Could not authenticate with Overseerr. Make sure Plex and Overseerr are running.', {
|
||||
setupUrl: detected.overseerr.localUrl,
|
||||
detected,
|
||||
summary
|
||||
summary,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -344,8 +344,8 @@ module.exports = function(ctx, helpers) {
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Cookie': overseerrSession.cookie,
|
||||
...options.headers
|
||||
}
|
||||
...options.headers,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
@@ -356,14 +356,14 @@ module.exports = function(ctx, helpers) {
|
||||
try {
|
||||
// Fetch quality profiles from Radarr
|
||||
const profilesRes = await ctx.fetchT(`${detected.radarr.localUrl}/api/v3/qualityprofile`, {
|
||||
headers: { 'X-Api-Key': detected.radarr.apiKey }
|
||||
headers: { 'X-Api-Key': detected.radarr.apiKey },
|
||||
});
|
||||
const profiles = profilesRes.ok ? await profilesRes.json() : [];
|
||||
const defaultProfile = profiles[0] || { id: 1, name: 'Any' };
|
||||
|
||||
// Fetch root folders from Radarr
|
||||
const rootFoldersRes = await ctx.fetchT(`${detected.radarr.localUrl}/api/v3/rootfolder`, {
|
||||
headers: { 'X-Api-Key': detected.radarr.apiKey }
|
||||
headers: { 'X-Api-Key': detected.radarr.apiKey },
|
||||
});
|
||||
const rootFolders = rootFoldersRes.ok ? await rootFoldersRes.json() : [];
|
||||
const defaultRootFolder = rootFolders[0]?.path || '/movies';
|
||||
@@ -384,12 +384,12 @@ module.exports = function(ctx, helpers) {
|
||||
minimumAvailability: 'released',
|
||||
isDefault: true,
|
||||
externalUrl: detected.radarr.localUrl,
|
||||
tags: []
|
||||
tags: [],
|
||||
};
|
||||
|
||||
const resp = await overseerrFetch('/api/v1/settings/radarr', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(radarrConfig)
|
||||
body: JSON.stringify(radarrConfig),
|
||||
});
|
||||
|
||||
configResults.radarr = resp.ok ? 'configured' : `failed: ${await resp.text()}`;
|
||||
@@ -403,14 +403,14 @@ module.exports = function(ctx, helpers) {
|
||||
try {
|
||||
// Fetch quality profiles from Sonarr
|
||||
const profilesRes = await ctx.fetchT(`${detected.sonarr.localUrl}/api/v3/qualityprofile`, {
|
||||
headers: { 'X-Api-Key': detected.sonarr.apiKey }
|
||||
headers: { 'X-Api-Key': detected.sonarr.apiKey },
|
||||
});
|
||||
const profiles = profilesRes.ok ? await profilesRes.json() : [];
|
||||
const defaultProfile = profiles[0] || { id: 1, name: 'Any' };
|
||||
|
||||
// Fetch root folders from Sonarr
|
||||
const rootFoldersRes = await ctx.fetchT(`${detected.sonarr.localUrl}/api/v3/rootfolder`, {
|
||||
headers: { 'X-Api-Key': detected.sonarr.apiKey }
|
||||
headers: { 'X-Api-Key': detected.sonarr.apiKey },
|
||||
});
|
||||
const rootFolders = rootFoldersRes.ok ? await rootFoldersRes.json() : [];
|
||||
const defaultRootFolder = rootFolders[0]?.path || '/tv';
|
||||
@@ -419,7 +419,7 @@ module.exports = function(ctx, helpers) {
|
||||
let languageProfileId = 1;
|
||||
try {
|
||||
const langRes = await ctx.fetchT(`${detected.sonarr.localUrl}/api/v3/languageprofile`, {
|
||||
headers: { 'X-Api-Key': detected.sonarr.apiKey }
|
||||
headers: { 'X-Api-Key': detected.sonarr.apiKey },
|
||||
});
|
||||
if (langRes.ok) {
|
||||
const langProfiles = await langRes.json();
|
||||
@@ -444,12 +444,12 @@ module.exports = function(ctx, helpers) {
|
||||
isDefault: true,
|
||||
enableSeasonFolders: true,
|
||||
externalUrl: detected.sonarr.localUrl,
|
||||
tags: []
|
||||
tags: [],
|
||||
};
|
||||
|
||||
const resp = await overseerrFetch('/api/v1/settings/sonarr', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(sonarrConfig)
|
||||
body: JSON.stringify(sonarrConfig),
|
||||
});
|
||||
|
||||
configResults.sonarr = resp.ok ? 'configured' : `failed: ${await resp.text()}`;
|
||||
@@ -466,7 +466,7 @@ module.exports = function(ctx, helpers) {
|
||||
'deploymentSuccess',
|
||||
'Arr Stack Auto-Connected',
|
||||
`Overseerr configured: ${Object.entries(configResults).filter(([k,v]) => v === 'configured').map(([k]) => k).join(', ')}`,
|
||||
'success'
|
||||
'success',
|
||||
);
|
||||
}
|
||||
|
||||
@@ -475,7 +475,7 @@ module.exports = function(ctx, helpers) {
|
||||
message: anyConfigured ? 'Auto-setup completed successfully!' : 'Configuration failed',
|
||||
detected,
|
||||
configResults,
|
||||
summary
|
||||
summary,
|
||||
});
|
||||
}, 'arr-auto-setup'));
|
||||
|
||||
|
||||
@@ -39,7 +39,7 @@ module.exports = function(ctx, helpers) {
|
||||
service,
|
||||
source: url ? 'external' : 'local',
|
||||
url: url || null,
|
||||
storedAt: new Date().toISOString()
|
||||
storedAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
// Test connection if URL is known
|
||||
@@ -77,7 +77,7 @@ module.exports = function(ctx, helpers) {
|
||||
return ctx.errorResponse(res, 400, 'Invalid seedbox base URL');
|
||||
}
|
||||
await ctx.credentialManager.store('arr.seedbox.baseurl', seedboxBaseUrl, {
|
||||
storedAt: new Date().toISOString()
|
||||
storedAt: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -87,7 +87,7 @@ module.exports = function(ctx, helpers) {
|
||||
success: true,
|
||||
message: `${service} API key stored`,
|
||||
connectionTest,
|
||||
url: resolvedUrl
|
||||
url: resolvedUrl,
|
||||
});
|
||||
}, 'arr-credentials-store'));
|
||||
|
||||
@@ -106,7 +106,7 @@ module.exports = function(ctx, helpers) {
|
||||
url: metadata?.url || null,
|
||||
lastVerified: metadata?.lastVerified || null,
|
||||
version: metadata?.version || null,
|
||||
source: metadata?.source || null
|
||||
source: metadata?.source || null,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@ module.exports = function(ctx, helpers) {
|
||||
sonarr: null,
|
||||
overseerr: null,
|
||||
lidarr: null,
|
||||
prowlarr: null
|
||||
prowlarr: null,
|
||||
};
|
||||
|
||||
// Service detection patterns
|
||||
@@ -35,7 +35,7 @@ module.exports = function(ctx, helpers) {
|
||||
image: container.Image,
|
||||
port: exposedPort,
|
||||
status: container.State,
|
||||
url: helpers.getServiceUrl(containerName, exposedPort)
|
||||
url: helpers.getServiceUrl(containerName, exposedPort),
|
||||
};
|
||||
|
||||
// Get API key for arr services (not Plex or Overseerr)
|
||||
@@ -58,8 +58,8 @@ module.exports = function(ctx, helpers) {
|
||||
plexReady: !!(detected.plex?.token),
|
||||
radarrReady: !!(detected.radarr?.apiKey),
|
||||
sonarrReady: !!(detected.sonarr?.apiKey),
|
||||
overseerrRunning: !!detected.overseerr
|
||||
}
|
||||
overseerrRunning: !!detected.overseerr,
|
||||
},
|
||||
});
|
||||
}, 'arr-detect'));
|
||||
|
||||
@@ -86,7 +86,7 @@ module.exports = function(ctx, helpers) {
|
||||
containerId: container.Id,
|
||||
containerName: container.Names[0]?.replace(/^\//, ''),
|
||||
port: portInfo?.PublicPort || config.port,
|
||||
status: container.State
|
||||
status: container.State,
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -122,7 +122,7 @@ module.exports = function(ctx, helpers) {
|
||||
hasToken: false,
|
||||
containerId: null,
|
||||
containerName: null,
|
||||
version: null
|
||||
version: null,
|
||||
};
|
||||
|
||||
// Check Docker first
|
||||
@@ -143,7 +143,7 @@ module.exports = function(ctx, helpers) {
|
||||
// Store for later use
|
||||
await ctx.credentialManager.store('arr.plex.token', token, {
|
||||
service: 'plex', source: 'local', url: entry.url,
|
||||
lastVerified: new Date().toISOString()
|
||||
lastVerified: new Date().toISOString(),
|
||||
});
|
||||
} else {
|
||||
entry.status = 'needs_key';
|
||||
@@ -160,7 +160,7 @@ module.exports = function(ctx, helpers) {
|
||||
try {
|
||||
const radarrCheck = await ctx.fetchT(`http://host.docker.internal:${dc.port}/api/v1/settings/radarr`, {
|
||||
headers: { 'Cookie': session.cookie },
|
||||
signal: AbortSignal.timeout(5000)
|
||||
signal: AbortSignal.timeout(5000),
|
||||
});
|
||||
if (radarrCheck.ok) {
|
||||
const radarrSettings = await radarrCheck.json();
|
||||
@@ -170,7 +170,7 @@ module.exports = function(ctx, helpers) {
|
||||
try {
|
||||
const sonarrCheck = await ctx.fetchT(`http://host.docker.internal:${dc.port}/api/v1/settings/sonarr`, {
|
||||
headers: { 'Cookie': session.cookie },
|
||||
signal: AbortSignal.timeout(5000)
|
||||
signal: AbortSignal.timeout(5000),
|
||||
});
|
||||
if (sonarrCheck.ok) {
|
||||
const sonarrSettings = await sonarrCheck.json();
|
||||
@@ -180,7 +180,7 @@ module.exports = function(ctx, helpers) {
|
||||
try {
|
||||
const plexCheck = await ctx.fetchT(`http://host.docker.internal:${dc.port}/api/v1/settings/plex`, {
|
||||
headers: { 'Cookie': session.cookie },
|
||||
signal: AbortSignal.timeout(5000)
|
||||
signal: AbortSignal.timeout(5000),
|
||||
});
|
||||
if (plexCheck.ok) {
|
||||
const plexSettings = await plexCheck.json();
|
||||
@@ -273,7 +273,7 @@ module.exports = function(ctx, helpers) {
|
||||
fullyConnected: statuses.filter(s => s.status === 'connected').length,
|
||||
needsApiKey: statuses.filter(s => s.status === 'needs_key').length,
|
||||
errors: statuses.filter(s => s.status === 'error').length,
|
||||
readyForAutoConnect: statuses.filter(s => s.status === 'connected').length >= 2
|
||||
readyForAutoConnect: statuses.filter(s => s.status === 'connected').length >= 2,
|
||||
};
|
||||
|
||||
res.json({ success: true, services: result, seedboxBaseUrl: detectedSeedboxUrl, summary });
|
||||
|
||||
@@ -12,7 +12,7 @@ module.exports = function(ctx) {
|
||||
const exec = await dockerContainer.exec({
|
||||
Cmd: ['cat', '/config/config.xml'],
|
||||
AttachStdout: true,
|
||||
AttachStderr: true
|
||||
AttachStderr: true,
|
||||
});
|
||||
|
||||
const stream = await exec.start();
|
||||
@@ -38,7 +38,7 @@ module.exports = function(ctx) {
|
||||
try {
|
||||
const containers = await ctx.docker.client.listContainers({ all: false });
|
||||
const container = containers.find(c =>
|
||||
c.Names.some(n => n.toLowerCase().includes(containerName.toLowerCase()) || n.toLowerCase().includes('plex'))
|
||||
c.Names.some(n => n.toLowerCase().includes(containerName.toLowerCase()) || n.toLowerCase().includes('plex')),
|
||||
);
|
||||
|
||||
if (!container) return null;
|
||||
@@ -47,7 +47,7 @@ module.exports = function(ctx) {
|
||||
const exec = await dockerContainer.exec({
|
||||
Cmd: ['cat', '/config/Library/Application Support/Plex Media Server/Preferences.xml'],
|
||||
AttachStdout: true,
|
||||
AttachStderr: true
|
||||
AttachStderr: true,
|
||||
});
|
||||
|
||||
const stream = await exec.start();
|
||||
@@ -97,7 +97,7 @@ module.exports = function(ctx) {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ authToken: plexToken }),
|
||||
signal: AbortSignal.timeout(10000)
|
||||
signal: AbortSignal.timeout(10000),
|
||||
});
|
||||
|
||||
if (!authRes.ok) {
|
||||
@@ -125,7 +125,7 @@ module.exports = function(ctx) {
|
||||
// 1. Get Plex server identity (for return info)
|
||||
const identityRes = await ctx.fetchT(`${plexUrl}/identity`, {
|
||||
headers: { 'X-Plex-Token': plexToken, 'Accept': 'application/json' },
|
||||
signal: AbortSignal.timeout(10000)
|
||||
signal: AbortSignal.timeout(10000),
|
||||
});
|
||||
if (!identityRes.ok) throw new Error('Cannot reach Plex server');
|
||||
const identity = await identityRes.json();
|
||||
@@ -136,16 +136,16 @@ module.exports = function(ctx) {
|
||||
const plexConfig = {
|
||||
ip: 'host.docker.internal',
|
||||
port: APP_PORTS.plex,
|
||||
useSsl: false
|
||||
useSsl: false,
|
||||
};
|
||||
|
||||
const configRes = await ctx.fetchT(`${overseerrUrl}/api/v1/settings/plex`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Cookie': sessionCookie
|
||||
'Cookie': sessionCookie,
|
||||
},
|
||||
body: JSON.stringify(plexConfig)
|
||||
body: JSON.stringify(plexConfig),
|
||||
});
|
||||
|
||||
if (!configRes.ok) {
|
||||
@@ -157,7 +157,7 @@ module.exports = function(ctx) {
|
||||
await ctx.fetchT(`${overseerrUrl}/api/v1/settings/plex/sync`, {
|
||||
method: 'POST',
|
||||
headers: { 'Cookie': sessionCookie },
|
||||
signal: AbortSignal.timeout(10000)
|
||||
signal: AbortSignal.timeout(10000),
|
||||
});
|
||||
} catch (e) {
|
||||
ctx.log.warn('arr', 'Plex library sync trigger failed (non-fatal)', { error: e.message });
|
||||
@@ -168,7 +168,7 @@ module.exports = function(ctx) {
|
||||
try {
|
||||
const libRes = await ctx.fetchT(`${overseerrUrl}/api/v1/settings/plex`, {
|
||||
headers: { 'Cookie': sessionCookie },
|
||||
signal: AbortSignal.timeout(5000)
|
||||
signal: AbortSignal.timeout(5000),
|
||||
});
|
||||
if (libRes.ok) {
|
||||
const plexSettings = await libRes.json();
|
||||
@@ -188,7 +188,7 @@ module.exports = function(ctx) {
|
||||
try {
|
||||
const existingRes = await ctx.fetchT(`${prowlarrUrl}/api/v1/applications`, {
|
||||
headers: { 'X-Api-Key': prowlarrApiKey },
|
||||
signal: AbortSignal.timeout(10000)
|
||||
signal: AbortSignal.timeout(10000),
|
||||
});
|
||||
existingApps = existingRes.ok ? await existingRes.json() : [];
|
||||
} catch (e) {
|
||||
@@ -217,8 +217,8 @@ module.exports = function(ctx) {
|
||||
{ name: 'prowlarrUrl', value: prowlarrUrl },
|
||||
{ name: 'baseUrl', value: config.url },
|
||||
{ name: 'apiKey', value: config.apiKey },
|
||||
{ name: 'syncCategories', value: syncCategories }
|
||||
]
|
||||
{ name: 'syncCategories', value: syncCategories },
|
||||
],
|
||||
};
|
||||
|
||||
try {
|
||||
@@ -226,10 +226,10 @@ module.exports = function(ctx) {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'X-Api-Key': prowlarrApiKey
|
||||
'X-Api-Key': prowlarrApiKey,
|
||||
},
|
||||
body: JSON.stringify(payload),
|
||||
signal: AbortSignal.timeout(10000)
|
||||
signal: AbortSignal.timeout(10000),
|
||||
});
|
||||
results[appName] = res.ok ? 'configured' : `failed: ${await res.text()}`;
|
||||
} catch (e) {
|
||||
@@ -262,7 +262,7 @@ module.exports = function(ctx) {
|
||||
const response = await ctx.fetchT(apiEndpoint, {
|
||||
method: 'GET',
|
||||
headers,
|
||||
signal: AbortSignal.timeout(15000)
|
||||
signal: AbortSignal.timeout(15000),
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
@@ -297,6 +297,6 @@ module.exports = function(ctx) {
|
||||
getOverseerrApiKey,
|
||||
connectPlexToOverseerr,
|
||||
configureProwlarrApps,
|
||||
testServiceConnection
|
||||
testServiceConnection,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -14,7 +14,7 @@ module.exports = function(ctx, helpers) {

if (!plexToken) {
return ctx.errorResponse(res, 400, 'No Plex token available. Claim your Plex server first.', {
hint: 'Deploy Plex with a claim token or manually configure it.'
hint: 'Deploy Plex with a claim token or manually configure it.',
});
}

@@ -32,7 +32,7 @@ module.exports = function(ctx, helpers) {
// Fetch libraries
const libRes = await ctx.fetchT(`${plexUrl}/library/sections`, {
headers: { 'X-Plex-Token': plexToken, 'Accept': 'application/json' },
signal: AbortSignal.timeout(10000)
signal: AbortSignal.timeout(10000),
});

if (!libRes.ok) {
@@ -45,7 +45,7 @@ module.exports = function(ctx, helpers) {
title: dir.title,
type: dir.type,
count: parseInt(dir.count) || 0,
scannedAt: dir.scannedAt
scannedAt: dir.scannedAt,
}));

// Get server name
@@ -54,7 +54,7 @@ module.exports = function(ctx, helpers) {
try {
const identityRes = await ctx.fetchT(`${plexUrl}/identity`, {
headers: { 'X-Plex-Token': plexToken, 'Accept': 'application/json' },
signal: AbortSignal.timeout(5000)
signal: AbortSignal.timeout(5000),
});
if (identityRes.ok) {
const identity = await identityRes.json();
@@ -66,7 +66,7 @@ module.exports = function(ctx, helpers) {
// Store token for future use
await ctx.credentialManager.store('arr.plex.token', plexToken, {
service: 'plex', source: 'local', url: plexUrl,
lastVerified: new Date().toISOString()
lastVerified: new Date().toISOString(),
});

res.json({ success: true, serverName, version, libraries });

@@ -44,7 +44,7 @@ module.exports = function(ctx, helpers) {
steps.push({
step: `Test ${svc.charAt(0).toUpperCase() + svc.slice(1)} connection`,
status: test.success ? 'success' : 'failed',
details: test.success ? `v${test.version}` : test.error
details: test.success ? `v${test.version}` : test.error,
});

if (test.success) {
@@ -55,12 +55,12 @@ module.exports = function(ctx, helpers) {
const stored = await ctx.credentialManager.store(`arr.${svc}.apikey`, apiKey, {
service: svc, source: 'external', url,
lastVerified: new Date().toISOString(),
version: test.version
version: test.version,
});
steps.push({
step: `Save ${svc} credentials`,
status: stored ? 'success' : 'failed',
details: stored ? 'Encrypted and saved' : 'Storage failed'
details: stored ? 'Encrypted and saved' : 'Storage failed',
});
}
}
@@ -94,7 +94,7 @@ module.exports = function(ctx, helpers) {
steps.push({
step: 'Get Overseerr API key',
status: 'failed',
details: 'Could not authenticate with Overseerr (Plex not running or not linked)'
details: 'Could not authenticate with Overseerr (Plex not running or not linked)',
});
} else {
steps.push({ step: 'Get Overseerr API key', status: 'success', details: 'Extracted from container' });
@@ -110,7 +110,7 @@ module.exports = function(ctx, helpers) {
// Fetch quality profiles
const profilesRes = await ctx.fetchT(`${radarrUrl}/api/v3/qualityprofile`, {
headers: { 'X-Api-Key': connectedServices.radarr.apiKey },
signal: AbortSignal.timeout(10000)
signal: AbortSignal.timeout(10000),
});
const profiles = profilesRes.ok ? await profilesRes.json() : [];
const defaultProfile = profiles[0] || { id: 1, name: 'Any' };
@@ -118,7 +118,7 @@ module.exports = function(ctx, helpers) {
// Fetch root folders
const rootFoldersRes = await ctx.fetchT(`${radarrUrl}/api/v3/rootfolder`, {
headers: { 'X-Api-Key': connectedServices.radarr.apiKey },
signal: AbortSignal.timeout(10000)
signal: AbortSignal.timeout(10000),
});
const rootFolders = rootFoldersRes.ok ? await rootFoldersRes.json() : [];
const defaultRootFolder = rootFolders[0]?.path || '/movies';
@@ -141,20 +141,20 @@ module.exports = function(ctx, helpers) {
minimumAvailability: 'released',
isDefault: true,
externalUrl: connectedServices.radarr.url,
tags: []
tags: [],
};

const radarrRes = await ctx.fetchT(`${overseerrUrl}/api/v1/settings/radarr`, {
method: 'POST',
headers: { 'Content-Type': 'application/json', 'Cookie': overseerrCookie },
body: JSON.stringify(radarrConfig),
signal: AbortSignal.timeout(10000)
signal: AbortSignal.timeout(10000),
});

steps.push({
step: 'Configure Radarr in Overseerr',
status: radarrRes.ok ? 'success' : 'failed',
details: radarrRes.ok ? `Profile: ${defaultProfile.name}, Root: ${defaultRootFolder}` : await radarrRes.text()
details: radarrRes.ok ? `Profile: ${defaultProfile.name}, Root: ${defaultRootFolder}` : await radarrRes.text(),
});
} catch (e) {
steps.push({ step: 'Configure Radarr in Overseerr', status: 'failed', details: e.message });
@@ -170,14 +170,14 @@ module.exports = function(ctx, helpers) {

const profilesRes = await ctx.fetchT(`${sonarrUrl}/api/v3/qualityprofile`, {
headers: { 'X-Api-Key': connectedServices.sonarr.apiKey },
signal: AbortSignal.timeout(10000)
signal: AbortSignal.timeout(10000),
});
const profiles = profilesRes.ok ? await profilesRes.json() : [];
const defaultProfile = profiles[0] || { id: 1, name: 'Any' };

const rootFoldersRes = await ctx.fetchT(`${sonarrUrl}/api/v3/rootfolder`, {
headers: { 'X-Api-Key': connectedServices.sonarr.apiKey },
signal: AbortSignal.timeout(10000)
signal: AbortSignal.timeout(10000),
});
const rootFolders = rootFoldersRes.ok ? await rootFoldersRes.json() : [];
const defaultRootFolder = rootFolders[0]?.path || '/tv';
@@ -186,7 +186,7 @@ module.exports = function(ctx, helpers) {
try {
const langRes = await ctx.fetchT(`${sonarrUrl}/api/v3/languageprofile`, {
headers: { 'X-Api-Key': connectedServices.sonarr.apiKey },
signal: AbortSignal.timeout(5000)
signal: AbortSignal.timeout(5000),
});
if (langRes.ok) {
const langProfiles = await langRes.json();
@@ -212,20 +212,20 @@ module.exports = function(ctx, helpers) {
isDefault: true,
enableSeasonFolders: true,
externalUrl: connectedServices.sonarr.url,
tags: []
tags: [],
};

const sonarrRes = await ctx.fetchT(`${overseerrUrl}/api/v1/settings/sonarr`, {
method: 'POST',
headers: { 'Content-Type': 'application/json', 'Cookie': overseerrCookie },
body: JSON.stringify(sonarrConfig),
signal: AbortSignal.timeout(10000)
signal: AbortSignal.timeout(10000),
});

steps.push({
step: 'Configure Sonarr in Overseerr',
status: sonarrRes.ok ? 'success' : 'failed',
details: sonarrRes.ok ? `Profile: ${defaultProfile.name}, Root: ${defaultRootFolder}` : await sonarrRes.text()
details: sonarrRes.ok ? `Profile: ${defaultProfile.name}, Root: ${defaultRootFolder}` : await sonarrRes.text(),
});
} catch (e) {
steps.push({ step: 'Configure Sonarr in Overseerr', status: 'failed', details: e.message });
@@ -239,7 +239,7 @@ module.exports = function(ctx, helpers) {
steps.push({
step: 'Connect Plex to Overseerr',
status: 'success',
details: `${plexResult.serverName} - ${plexResult.libraries.length} libraries synced`
details: `${plexResult.serverName} - ${plexResult.libraries.length} libraries synced`,
});
} catch (e) {
steps.push({ step: 'Connect Plex to Overseerr', status: 'failed', details: e.message });
@@ -259,13 +259,13 @@ module.exports = function(ctx, helpers) {
const prowlarrResults = await helpers.configureProwlarrApps(
connectedServices.prowlarr.url.replace(/\/+$/, ''),
connectedServices.prowlarr.apiKey,
appsToConnect
appsToConnect,
);
for (const [app, status] of Object.entries(prowlarrResults)) {
steps.push({
step: `Add ${app.charAt(0).toUpperCase() + app.slice(1)} to Prowlarr`,
status: status === 'configured' || status === 'already_configured' ? 'success' : 'failed',
details: status
details: status,
});
}
} catch (e) {
@@ -283,14 +283,14 @@ module.exports = function(ctx, helpers) {
'deploymentSuccess',
'Smart Arr Connect Complete',
`${succeeded}/${steps.length} steps completed successfully`,
'success'
'success',
);
}

res.json({
success: succeeded > 0,
steps,
summary: { totalSteps: steps.length, succeeded, failed }
summary: { totalSteps: steps.length, succeeded, failed },
});
}, 'smart-connect'));

@@ -16,7 +16,7 @@ module.exports = function(ctx) {
m: 60 * 1000,
h: 60 * 60 * 1000,
d: 24 * 60 * 60 * 1000,
y: 365 * 24 * 60 * 60 * 1000
y: 365 * 24 * 60 * 60 * 1000,
};

return value * (multipliers[unit] || multipliers.h);
@@ -54,7 +54,7 @@ module.exports = function(ctx) {

const keyData = await ctx.authManager.generateAPIKey(
name.trim(),
scopes || ['read', 'write']
scopes || ['read', 'write'],
);

res.json({
@@ -64,7 +64,7 @@ module.exports = function(ctx) {
name: keyData.name,
scopes: keyData.scopes,
createdAt: keyData.createdAt,
warning: 'Save this key securely - it will not be shown again'
warning: 'Save this key securely - it will not be shown again',
});
}, 'auth-keys-generate'));

@@ -109,9 +109,9 @@ module.exports = function(ctx) {
const token = await ctx.authManager.generateJWT(
{
sub: userId || 'dashcaddy-admin',
scope: ['admin'] // Session-generated JWTs have admin scope
scope: ['admin'], // Session-generated JWTs have admin scope
},
expiresIn || '24h'
expiresIn || '24h',
);

// Calculate expiration timestamp
@@ -122,7 +122,7 @@ module.exports = function(ctx) {
success: true,
token,
expiresAt,
usage: 'Include in Authorization header as: Bearer <token>'
usage: 'Include in Authorization header as: Bearer <token>',
});
}, 'auth-jwt-generate'));


@@ -29,7 +29,7 @@ module.exports = function(ctx) {
const { spawnSync } = require('child_process');
const proc = spawnSync('wget', [
'-q', '-S', `--post-data=${routerBody}`, '-O', '/dev/null',
`${baseUrl}/cgi-bin/login.ha`
`${baseUrl}/cgi-bin/login.ha`,
], { timeout: 5000, encoding: 'utf8' });
const result = (proc.stderr || '').split('\n').slice(0, 2).join('\n');
const locationMatch = result.match(/Location:\s*(.+)/);

@@ -10,8 +10,8 @@ module.exports = function(ctx) {
config: {
enabled: ctx.totpConfig.enabled,
sessionDuration: ctx.totpConfig.sessionDuration,
isSetUp: ctx.totpConfig.isSetUp
}
isSetUp: ctx.totpConfig.isSetUp,
},
});
}, 'totp-config-get'));

@@ -35,7 +35,7 @@ module.exports = function(ctx) {
const otpauth = authenticator.keyuri('user', 'DashCaddy', secret);
const qrDataUrl = await QRCode.toDataURL(otpauth, {
width: 256, margin: 2,
color: { dark: '#ffffff', light: '#00000000' }
color: { dark: '#ffffff', light: '#00000000' },
});

res.json({ success: true, qrCode: qrDataUrl, manualKey: secret, issuer: 'DashCaddy', imported: !!req.body?.secret });
@@ -166,7 +166,7 @@ module.exports = function(ctx) {

if (sessionDuration && !ctx.session.durations.hasOwnProperty(sessionDuration)) {
return ctx.errorResponse(res, 400, 'Invalid session duration', {
validOptions: Object.keys(ctx.session.durations)
validOptions: Object.keys(ctx.session.durations),
});
}

@@ -180,7 +180,7 @@ module.exports = function(ctx) {
await ctx.saveTotpConfig();
res.json({
success: true,
config: { enabled: ctx.totpConfig.enabled, sessionDuration: ctx.totpConfig.sessionDuration, isSetUp: ctx.totpConfig.isSetUp }
config: { enabled: ctx.totpConfig.enabled, sessionDuration: ctx.totpConfig.sessionDuration, isSetUp: ctx.totpConfig.isSetUp },
});
}, 'totp-config'));


@@ -24,7 +24,7 @@ module.exports = function(ctx) {
const allRoots = BROWSE_ROOTS.map(r => ({
name: r.hostPath,
path: r.hostPath,
containerPath: r.containerPath
containerPath: r.containerPath,
}));

const roots = [];
@@ -45,7 +45,7 @@ module.exports = function(ctx) {
const allRoots = BROWSE_ROOTS.map(r => ({
name: r.hostPath,
path: r.hostPath,
type: 'drive'
type: 'drive',
}));
const roots = [];
for (const r of allRoots) {
@@ -58,12 +58,12 @@ module.exports = function(ctx) {
}

const matchingRoot = BROWSE_ROOTS.find(r =>
requestedPath.startsWith(r.hostPath) || requestedPath === r.hostPath.replace(/\/$/, '')
requestedPath.startsWith(r.hostPath) || requestedPath === r.hostPath.replace(/\/$/, ''),
);

if (!matchingRoot) {
return ctx.errorResponse(res, 400, 'Path not in browseable roots', {
availableRoots: BROWSE_ROOTS.map(r => r.hostPath)
availableRoots: BROWSE_ROOTS.map(r => r.hostPath),
});
}

@@ -80,7 +80,7 @@ module.exports = function(ctx) {
requestedPath, containerFullPath, allowedRoots,
error: error.message,
ip: req.ip,
userAgent: req.get('user-agent')
userAgent: req.get('user-agent'),
});
return ctx.errorResponse(res, 403, 'Access denied - path traversal detected');
}
@@ -108,7 +108,7 @@ module.exports = function(ctx) {
.map(entry => ({
name: entry.name,
path: path.join(requestedPath, entry.name).replace(/\\/g, '/'),
type: 'folder'
type: 'folder',
}))
.sort((a, b) => a.name.localeCompare(b.name));

@@ -119,7 +119,7 @@ module.exports = function(ctx) {
path: requestedPath,
parent: path.dirname(requestedPath).replace(/\\/g, '/') || null,
items: result.data,
...(result.pagination && { pagination: result.pagination })
...(result.pagination && { pagination: result.pagination }),
});
}, 'browse-dir'));

@@ -128,12 +128,12 @@ module.exports = function(ctx) {
const mediaServerPatterns = [
'plex', 'jellyfin', 'emby', 'kodi', 'navidrome', 'airsonic',
'subsonic', 'funkwhale', 'beets', 'lidarr', 'sonarr', 'radarr',
'bazarr', 'readarr', 'prowlarr', 'overseerr', 'ombi', 'tautulli'
'bazarr', 'readarr', 'prowlarr', 'overseerr', 'ombi', 'tautulli',
];

const excludePatterns = [
'/config', '/cache', '/transcode', '/data/config', '/app',
'/tmp', '/var', '/etc', '/opt', '/root', '/home', '/.', '/caddyfile'
'/tmp', '/var', '/etc', '/opt', '/root', '/home', '/.', '/caddyfile',
];

const containers = await ctx.docker.client.listContainers({ all: false });
@@ -155,7 +155,7 @@ module.exports = function(ctx) {

let hostPath, containerPath;
if (parts[0].length === 1 && /[A-Za-z]/.test(parts[0])) {
hostPath = parts[0] + ':' + parts[1];
hostPath = `${parts[0] }:${ parts[1]}`;
containerPath = parts[2] || '';
} else {
hostPath = parts[0];
@@ -164,7 +164,7 @@ module.exports = function(ctx) {

const isExcluded = excludePatterns.some(p =>
containerPath.toLowerCase().includes(p.toLowerCase()) ||
hostPath.toLowerCase().includes(p.toLowerCase())
hostPath.toLowerCase().includes(p.toLowerCase()),
);
if (isExcluded) continue;
if (seenPaths.has(hostPath)) continue;
@@ -175,7 +175,7 @@ module.exports = function(ctx) {
detectedMounts.push({
hostPath, containerPath, folderName,
sourceContainer: containerInfo.Names[0]?.replace('/', '') || containerInfo.Id.slice(0, 12),
sourceImage: containerInfo.Image.split('/').pop().split(':')[0]
sourceImage: containerInfo.Image.split('/').pop().split(':')[0],
});
}
}
@@ -185,7 +185,7 @@ module.exports = function(ctx) {
mounts: detectedMounts,
message: detectedMounts.length > 0
? `Found ${detectedMounts.length} media mount(s) from existing containers`
: 'No existing media mounts detected'
: 'No existing media mounts detected',
});
}, 'detect-media-mounts'));


@@ -38,8 +38,8 @@ module.exports = function(ctx) {
daysUntilExpiration,
algorithm: certInfo.algorithm || 'ECDSA P-256 with SHA-256',
serialNumber: certInfo.serialNumber,
downloadUrl: `https://ca${ctx.siteConfig.tld}/root.crt`
}
downloadUrl: `https://ca${ctx.siteConfig.tld}/root.crt`,
},
});
}, 'ca-info'));

@@ -99,7 +99,7 @@ module.exports = function(ctx) {
// Look for template in multiple locations (packaged app vs dev)
const templatePaths = [
path.join(__dirname, '..', 'scripts', templateName),
path.join('/app', 'scripts', templateName)
path.join('/app', 'scripts', templateName),
];

let templateContent;
@@ -208,12 +208,12 @@ ${domain.includes('.') ? `DNS.2 = *.${domain}` : ''}`;
const serverCertContent = await fsp.readFile(certFile, 'utf8');
const intermediateCertContent = await fsp.readFile(intermediateCert, 'utf8');
const rootCertContent = await fsp.readFile(rootCert, 'utf8');
await fsp.writeFile(fullChainFile, serverCertContent + '\n' + intermediateCertContent + '\n' + rootCertContent);
await fsp.writeFile(fullChainFile, `${serverCertContent }\n${ intermediateCertContent }\n${ rootCertContent}`);

execSync(`openssl pkcs12 -export -out "${pfxFile}" -inkey "${keyFile}" -in "${certFile}" -certfile "${intermediateCert}" -password "pass:${password}"`, { stdio: 'pipe' });

const keyContent = await fsp.readFile(keyFile, 'utf8');
await fsp.writeFile(pemFile, keyContent + '\n' + serverCertContent + '\n' + intermediateCertContent);
await fsp.writeFile(pemFile, `${keyContent }\n${ serverCertContent }\n${ intermediateCertContent}`);
}

if (format === 'pfx') {
@@ -274,7 +274,7 @@ ${domain.includes('.') ? `DNS.2 = *.${domain}` : ''}`;
domain, subject,
validFrom: notBefore, validUntil: notAfter,
daysUntilExpiration, fingerprint,
status: daysUntilExpiration < 0 ? 'expired' : daysUntilExpiration < 30 ? 'expiring-soon' : 'valid'
status: daysUntilExpiration < 0 ? 'expired' : daysUntilExpiration < 30 ? 'expiring-soon' : 'valid',
};
} catch {
return null;

@@ -56,7 +56,7 @@ module.exports = function(ctx) {
res.json({
success: true,
path: `/assets/${safeFilename}`,
message: `Logo saved to ${filePath}`
message: `Logo saved to ${filePath}`,
});
}, 'assets-upload'));

@@ -75,7 +75,7 @@ module.exports = function(ctx) {
customLogo: config.customLogo || config.customLogoDark || null,
position: config.logoPosition || 'left',
dashboardTitle: config.dashboardTitle || 'DashCaddy',
isDefault: !config.customLogoDark && !config.customLogoLight && !config.customLogo
isDefault: !config.customLogoDark && !config.customLogoLight && !config.customLogo,
});
}, 'logo-get'));

@@ -153,7 +153,7 @@ module.exports = function(ctx) {
path: pathDark || pathLight,
position: config.logoPosition || 'left',
dashboardTitle: config.dashboardTitle || 'DashCaddy',
message: 'Branding settings saved'
message: 'Branding settings saved',
});
}, 'logo-upload'));

@@ -186,7 +186,7 @@ module.exports = function(ctx) {

res.json({
success: true,
message: 'Branding reset to defaults'
message: 'Branding reset to defaults',
});
}, 'logo-delete'));

@@ -199,7 +199,7 @@ module.exports = function(ctx) {
res.json({
success: true,
customFavicon: config.customFavicon || null,
isDefault: !config.customFavicon
isDefault: !config.customFavicon,
});
}, 'favicon-get'));

@@ -237,8 +237,8 @@ module.exports = function(ctx) {
sharp(buffer)
.resize(size, size, { fit: 'contain', background: { r: 0, g: 0, b: 0, alpha: 0 } })
.png()
.toBuffer()
)
.toBuffer(),
),
);

// Convert to ICO
@@ -261,7 +261,7 @@ module.exports = function(ctx) {
res.json({
success: true,
path: '/assets/favicon.ico',
message: 'Favicon created successfully'
message: 'Favicon created successfully',
});
}, 'favicon'));

@@ -285,7 +285,7 @@ module.exports = function(ctx) {

res.json({
success: true,
message: 'Favicon reset to default'
message: 'Favicon reset to default',
});
}, 'favicon-delete'));


@@ -34,7 +34,7 @@ module.exports = function(ctx) {
dashcaddyVersion: '1.0.0',
files: {},
themes: {},
assets: {}
assets: {},
};

// Collect all configuration files (encryption key now included for self-contained restore)
@@ -48,7 +48,7 @@ module.exports = function(ctx) {
{ key: 'encryptionKey', path: ENCRYPTION_KEY_FILE, required: false },
{ key: 'totpConfig', path: ctx.TOTP_CONFIG_FILE, required: false },
{ key: 'tailscaleConfig', path: ctx.TAILSCALE_CONFIG_FILE, required: false },
{ key: 'notifications', path: ctx.NOTIFICATIONS_FILE, required: false }
{ key: 'notifications', path: ctx.NOTIFICATIONS_FILE, required: false },
];

for (const file of filesToBackup) {
@@ -59,12 +59,12 @@ module.exports = function(ctx) {
try {
backup.files[file.key] = {
type: 'json',
data: JSON.parse(content)
data: JSON.parse(content),
};
} catch {
backup.files[file.key] = {
type: 'text',
data: content
data: content,
};
}
} else if (file.required) {
@@ -85,7 +85,7 @@ module.exports = function(ctx) {
const otpauth = authenticator.keyuri('user', 'DashCaddy', secret);
const qrDataUrl = await QRCode.toDataURL(otpauth, {
width: 256, margin: 2,
color: { dark: '#000000', light: '#ffffff' }
color: { dark: '#000000', light: '#ffffff' },
});
backup.totp = { qrCode: qrDataUrl, issuer: 'DashCaddy' };
}
@@ -140,7 +140,7 @@ module.exports = function(ctx) {
valid: true,
version: backup.version,
exportedAt: backup.exportedAt,
files: {}
files: {},
};

// Check each file in the backup
@@ -154,7 +154,7 @@ module.exports = function(ctx) {
encryptionKey: { path: ENCRYPTION_KEY_FILE, description: 'Encryption key (for credentials)' },
totpConfig: { path: ctx.TOTP_CONFIG_FILE, description: 'TOTP authentication config' },
tailscaleConfig: { path: ctx.TAILSCALE_CONFIG_FILE, description: 'Tailscale config' },
notifications: { path: ctx.NOTIFICATIONS_FILE, description: 'Notification settings' }
notifications: { path: ctx.NOTIFICATIONS_FILE, description: 'Notification settings' },
};

for (const [key, value] of Object.entries(backup.files)) {
@@ -167,7 +167,7 @@ module.exports = function(ctx) {
inBackup: true,
currentExists,
action: currentExists ? 'overwrite' : 'create',
type: value.type
type: value.type,
};
}
}
@@ -204,7 +204,7 @@ module.exports = function(ctx) {
// Require TOTP verification for restores that include security-sensitive files
const sensitiveKeys = ['credentials', 'totpConfig', 'encryptionKey'];
const restoresSensitive = sensitiveKeys.some(key =>
backup.files[key] && backup.files[key].type !== 'missing' && !(options.skip || []).includes(key)
backup.files[key] && backup.files[key].type !== 'missing' && !(options.skip || []).includes(key),
);
if (restoresSensitive && ctx.totpConfig.enabled && ctx.totpConfig.isSetUp) {
if (!totpCode || !/^\d{6}$/.test(totpCode)) {
@@ -223,7 +223,7 @@ module.exports = function(ctx) {
const results = {
restored: [],
skipped: [],
errors: []
errors: [],
};

const ENCRYPTION_KEY_FILE = process.env.ENCRYPTION_KEY_FILE || path.join(path.dirname(ctx.SERVICES_FILE), '.encryption-key');
@@ -236,7 +236,7 @@ module.exports = function(ctx) {
encryptionKey: ENCRYPTION_KEY_FILE,
totpConfig: ctx.TOTP_CONFIG_FILE,
tailscaleConfig: ctx.TAILSCALE_CONFIG_FILE,
notifications: ctx.NOTIFICATIONS_FILE
notifications: ctx.NOTIFICATIONS_FILE,
};

// Restore each file
@@ -286,7 +286,7 @@ module.exports = function(ctx) {
const loadResponse = await ctx.fetchT(`${ctx.caddy.adminUrl}/load`, {
method: 'POST',
headers: { 'Content-Type': CADDY.CONTENT_TYPE },
body: caddyContent
body: caddyContent,
});

if (loadResponse.ok) {
@@ -345,7 +345,7 @@ module.exports = function(ctx) {
if (!fs.existsSync(THEMES_DIR)) fs.mkdirSync(THEMES_DIR, { recursive: true });
for (const [slug, data] of Object.entries(backup.themes)) {
if (/^[a-z0-9-]+$/.test(slug)) {
fs.writeFileSync(path.join(THEMES_DIR, slug + '.json'), JSON.stringify(data, null, 2), 'utf8');
fs.writeFileSync(path.join(THEMES_DIR, `${slug }.json`), JSON.stringify(data, null, 2), 'utf8');
}
}
results.restored.push(`themes:${Object.keys(backup.themes).length}`);
@@ -376,7 +376,7 @@ module.exports = function(ctx) {
message: success
? `Restored ${results.restored.length} file(s) successfully`
: `Restore completed with ${results.errors.length} error(s)`,
results
results,
});

ctx.log.info('backup', 'Backup restore completed', { restored: results.restored.length, errors: results.errors.length });

@@ -75,16 +75,16 @@ module.exports = function(ctx) {
|
||||
CapAdd: hostConfig.CapAdd,
|
||||
CapDrop: hostConfig.CapDrop,
|
||||
Devices: hostConfig.Devices,
|
||||
LogConfig: DOCKER.LOG_CONFIG // Ensure log rotation on updated containers
|
||||
LogConfig: DOCKER.LOG_CONFIG, // Ensure log rotation on updated containers
|
||||
},
|
||||
NetworkingConfig: {}
|
||||
NetworkingConfig: {},
|
||||
};
|
||||
|
||||
// Get network settings if using a custom network
|
||||
if (hostConfig.NetworkMode && !['bridge', 'host', 'none'].includes(hostConfig.NetworkMode)) {
|
||||
const networkName = hostConfig.NetworkMode;
|
||||
config.NetworkingConfig.EndpointsConfig = {
|
||||
[networkName]: containerInfo.NetworkSettings.Networks[networkName]
|
||||
[networkName]: containerInfo.NetworkSettings.Networks[networkName],
|
||||
};
|
||||
}
|
||||
|
||||
@@ -119,7 +119,7 @@ module.exports = function(ctx) {
|
||||
try {
|
||||
const pruneResult = await ctx.docker.client.pruneImages({ filters: { dangling: { true: true } } });
|
||||
if (pruneResult.SpaceReclaimed > 0) {
|
||||
ctx.log.info('docker', 'Pruned dangling images after update', { spaceReclaimed: Math.round(pruneResult.SpaceReclaimed / 1024 / 1024) + 'MB' });
|
||||
ctx.log.info('docker', 'Pruned dangling images after update', { spaceReclaimed: `${Math.round(pruneResult.SpaceReclaimed / 1024 / 1024) }MB` });
|
||||
}
|
||||
} catch (pruneErr) {
|
||||
ctx.log.debug('docker', 'Image prune after update failed', { error: pruneErr.message });
|
||||
@@ -128,7 +128,7 @@ module.exports = function(ctx) {
|
||||
res.json({
|
||||
success: true,
|
||||
message: `Container ${containerName} updated successfully`,
|
||||
newContainerId: newContainerInfo.Id
|
||||
newContainerId: newContainerInfo.Id,
|
||||
});
|
||||
}, 'container-update'));
|
||||
|
||||
@@ -148,7 +148,7 @@ module.exports = function(ctx) {
|
||||
const pullStream = await ctx.docker.pull(imageName);
|
||||
|
||||
const downloadedLayers = pullStream.filter(e =>
|
||||
e.status === 'Downloading' || e.status === 'Download complete'
|
||||
e.status === 'Downloading' || e.status === 'Download complete',
|
||||
);
|
||||
updateAvailable = downloadedLayers.length > 0;
|
||||
|
||||
@@ -167,7 +167,7 @@ module.exports = function(ctx) {
|
||||
success: true,
|
||||
imageName,
|
||||
updateAvailable,
|
||||
currentDigest: localDigest
|
||||
currentDigest: localDigest,
|
||||
});
|
||||
}, 'container-check-update'));
|
||||
|
||||
@@ -178,7 +178,7 @@ module.exports = function(ctx) {
|
||||
stdout: true,
|
||||
stderr: true,
|
||||
tail: 100,
|
||||
timestamps: true
|
||||
timestamps: true,
|
||||
});
|
||||
res.json({ success: true, logs: logs.toString() });
|
||||
}, 'container-logs'));
|
||||
@@ -194,7 +194,7 @@ module.exports = function(ctx) {
|
||||
router.get('/discover', ctx.asyncHandler(async (req, res) => {
|
||||
const containers = await ctx.docker.client.listContainers({ all: true });
|
||||
const samiContainers = containers.filter(container =>
|
||||
container.Labels && container.Labels['sami.managed'] === 'true'
|
||||
container.Labels && container.Labels['sami.managed'] === 'true',
|
||||
);
|
||||
|
||||
const discoveredContainers = samiContainers.map(container => ({
|
||||
@@ -205,7 +205,7 @@ module.exports = function(ctx) {
|
||||
status: container.Status,
|
||||
appTemplate: container.Labels['sami.app'],
|
||||
subdomain: container.Labels['sami.subdomain'],
|
||||
ports: container.Ports
|
||||
ports: container.Ports,
|
||||
}));
|
||||
|
||||
const paginationParams = parsePaginationParams(req.query);
|
||||
|
||||
@@ -113,7 +113,7 @@ module.exports = function(ctx) {
|
||||
const zone = parts.slice(1).join('.') || ctx.siteConfig.tld.replace(/^\./, '');
|
||||
|
||||
const result = await ctx.dns.call(dnsServer, '/api/zones/records/add', {
|
||||
token: dnsToken, domain, zone, type: 'A', ipAddress: ip, ttl: recordTtl.toString(), overwrite: 'true'
|
||||
token: dnsToken, domain, zone, type: 'A', ipAddress: ip, ttl: recordTtl.toString(), overwrite: 'true',
|
||||
});
|
||||
|
||||
if (result.status === 'ok') {
|
||||
@@ -151,7 +151,7 @@ module.exports = function(ctx) {
|
||||
|
||||
try {
|
||||
const result = await ctx.dns.call(dnsServer, '/api/zones/records/get', {
|
||||
token: dnsToken, domain, zone: ctx.siteConfig.tld.replace(/^\./, ''), listZone: 'true'
|
||||
token: dnsToken, domain, zone: ctx.siteConfig.tld.replace(/^\./, ''), listZone: 'true',
|
||||
});
|
||||
|
||||
if (result.status === 'ok' && result.response && result.response.records) {
|
||||
@@ -218,7 +218,7 @@ module.exports = function(ctx) {
|
||||
const response = await ctx.fetchT(technitiumUrl, {
|
||||
method: 'GET',
|
||||
headers: { 'Accept': 'text/plain' },
|
||||
timeout: 10000
|
||||
timeout: 10000,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
@@ -232,7 +232,7 @@ module.exports = function(ctx) {
|
||||
server: server,
|
||||
count: 0,
|
||||
logs: [],
|
||||
message: 'No logs available for this server'
|
||||
message: 'No logs available for this server',
|
||||
});
|
||||
}
|
||||
return ctx.errorResponse(res, response.status, ctx.safeErrorMessage(errorJson.errorMessage || errorText));
|
||||
@@ -255,7 +255,7 @@ module.exports = function(ctx) {
|
||||
server: server,
|
||||
count: 0,
|
||||
logs: [],
|
||||
message: 'No logs available for this server'
|
||||
message: 'No logs available for this server',
|
||||
});
|
||||
}
|
||||
// Invalidate cached token on auth errors so next request re-authenticates
|
||||
@@ -287,7 +287,7 @@ module.exports = function(ctx) {
|
||||
class: match[6].trim(),
|
||||
rcode: match[7].trim(),
|
||||
answer: match[8].trim() || null,
|
||||
raw: line
|
||||
raw: line,
|
||||
};
|
||||
}
|
||||
return { raw: line, parsed: false };
|
||||
@@ -299,7 +299,7 @@ module.exports = function(ctx) {
|
||||
server: server,
|
||||
logFile: logFileName,
|
||||
count: parsedLogs.length,
|
||||
logs: parsedLogs
|
||||
logs: parsedLogs,
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
@@ -319,7 +319,7 @@ module.exports = function(ctx) {
|
||||
hasCredentials,
|
||||
hasToken,
|
||||
tokenExpiry: ctx.dns.getTokenExpiry(),
|
||||
isExpired: ctx.dns.getTokenExpiry() ? new Date() > new Date(ctx.dns.getTokenExpiry()) : null
|
||||
isExpired: ctx.dns.getTokenExpiry() ? new Date() > new Date(ctx.dns.getTokenExpiry()) : null,
|
||||
});
|
||||
}, 'dns-token-status'));
|
||||
|
||||
@@ -394,7 +394,7 @@ module.exports = function(ctx) {
|
||||
return res.json({
|
||||
success: anySuccess,
|
||||
message: anySuccess ? 'Credentials saved for one or more servers' : 'All server credential tests failed',
|
||||
results
|
||||
results,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -430,7 +430,7 @@ module.exports = function(ctx) {
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'DNS credentials saved and verified (encrypted)',
|
||||
tokenExpiry: ctx.dns.getTokenExpiry()
|
||||
tokenExpiry: ctx.dns.getTokenExpiry(),
|
||||
});
|
||||
}, 'dns-credentials'));
|
||||
|
||||
@@ -495,7 +495,7 @@ module.exports = function(ctx) {
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Token refreshed successfully',
|
||||
tokenExpiry: ctx.dns.getTokenExpiry()
|
||||
tokenExpiry: ctx.dns.getTokenExpiry(),
|
||||
});
|
||||
} else {
|
||||
ctx.errorResponse(res, 401, result.error);
|
||||
@@ -529,8 +529,8 @@ module.exports = function(ctx) {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'User-Agent': APP.USER_AGENTS.API
|
||||
}
|
||||
'User-Agent': APP.USER_AGENTS.API,
|
||||
},
|
||||
});
|
||||
|
||||
const text = await response.text();
|
||||
@@ -550,7 +550,7 @@ module.exports = function(ctx) {
|
||||
updateTitle: result.response.updateTitle || null,
|
||||
updateMessage: result.response.updateMessage || null,
|
||||
downloadLink: result.response.downloadLink || null,
|
||||
instructionsLink: result.response.instructionsLink || null
|
||||
instructionsLink: result.response.instructionsLink || null,
|
||||
});
|
||||
} else {
|
||||
ctx.errorResponse(res, 500, result.errorMessage || 'Check failed');
|
||||
@@ -586,7 +586,7 @@ module.exports = function(ctx) {
|
||||
// Check if update is available
|
||||
const checkResponse = await ctx.fetchT(
|
||||
`http://${serverIp}:${dnsPort}/api/user/checkForUpdate?token=${encodeURIComponent(tokenResult.token)}`,
|
||||
{ method: 'GET', headers: { 'Accept': 'application/json' } }
|
||||
{ method: 'GET', headers: { 'Accept': 'application/json' } },
|
||||
);
|
||||
|
||||
const checkText = await checkResponse.text();
|
||||
@@ -604,7 +604,7 @@ module.exports = function(ctx) {
|
||||
success: true,
|
||||
message: 'Already up to date',
|
||||
currentVersion: checkResult.response.currentVersion,
|
||||
updated: false
|
||||
updated: false,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -620,7 +620,7 @@ module.exports = function(ctx) {
|
||||
downloadLink: checkResult.response.downloadLink || null,
|
||||
instructionsLink: checkResult.response.instructionsLink || null,
|
||||
updated: false,
|
||||
manualUpdateRequired: true
|
||||
manualUpdateRequired: true,
|
||||
});
|
||||
} catch (error) {
|
||||
ctx.log.error('dns', 'DNS update error', { error: error.message });
|
||||
|
||||
@@ -25,7 +25,7 @@ module.exports = function(ctx) {
|
||||
return {
|
||||
timestamp: match[1],
|
||||
context: match[2],
|
||||
error: match[3]
|
||||
error: match[3],
|
||||
};
|
||||
}
|
||||
return null;
|
||||
|
||||
@@ -34,7 +34,7 @@ module.exports = function(ctx) {
|
||||
|
||||
try {
|
||||
let url = null;
|
||||
let checkType = 'http';
|
||||
const checkType = 'http';
|
||||
|
||||
// Determine URL to check
|
||||
url = resolveServiceUrl(serviceId, service, ctx.siteConfig, ctx.buildServiceUrl);
|
||||
@@ -52,7 +52,7 @@ module.exports = function(ctx) {
|
||||
const response = await ctx.fetchT(url, {
|
||||
method: 'HEAD',
|
||||
signal: controller.signal,
|
||||
redirect: 'follow'
|
||||
redirect: 'follow',
|
||||
});
|
||||
clearTimeout(timeout);
|
||||
|
||||
@@ -60,7 +60,7 @@ module.exports = function(ctx) {
|
||||
status: response.ok || response.status < 500 ? 'healthy' : 'unhealthy',
|
||||
statusCode: response.status,
|
||||
url,
|
||||
checkedAt: new Date().toISOString()
|
||||
checkedAt: new Date().toISOString(),
|
||||
};
|
||||
} catch (fetchError) {
|
||||
clearTimeout(timeout);
|
||||
@@ -73,7 +73,7 @@ module.exports = function(ctx) {
|
||||
const getResponse = await ctx.fetchT(url, {
|
||||
method: 'GET',
|
||||
signal: getController.signal,
|
||||
redirect: 'follow'
|
||||
redirect: 'follow',
|
||||
});
|
||||
clearTimeout(getTimeout);
|
||||
|
||||
@@ -81,14 +81,14 @@ module.exports = function(ctx) {
|
||||
status: getResponse.ok || getResponse.status < 500 ? 'healthy' : 'unhealthy',
|
||||
statusCode: getResponse.status,
|
||||
url,
|
||||
checkedAt: new Date().toISOString()
|
||||
checkedAt: new Date().toISOString(),
|
||||
};
|
||||
} catch (e) {
|
||||
health[serviceId] = {
|
||||
status: 'unhealthy',
|
||||
reason: e.name === 'AbortError' ? 'Timeout' : e.message,
|
||||
url,
|
||||
checkedAt: new Date().toISOString()
|
||||
checkedAt: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -96,7 +96,7 @@ module.exports = function(ctx) {
|
||||
health[serviceId] = {
|
||||
status: 'error',
|
||||
reason: e.message,
|
||||
checkedAt: new Date().toISOString()
|
||||
checkedAt: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
}));
|
||||
@@ -113,7 +113,7 @@ module.exports = function(ctx) {
|
||||
success: true,
|
||||
health: paginatedHealth,
|
||||
checkedAt: lastHealthCheck,
|
||||
...(result.pagination && { pagination: result.pagination })
|
||||
...(result.pagination && { pagination: result.pagination }),
|
||||
});
|
||||
}, 'health-services'));
|
||||
|
||||
@@ -123,7 +123,7 @@ module.exports = function(ctx) {
|
||||
success: true,
|
||||
health: serviceHealthCache,
|
||||
lastCheck: lastHealthCheck,
|
||||
cacheAge: lastHealthCheck ? Date.now() - new Date(lastHealthCheck).getTime() : null
|
||||
cacheAge: lastHealthCheck ? Date.now() - new Date(lastHealthCheck).getTime() : null,
|
||||
});
|
||||
}, 'health-cached'));
|
||||
|
||||
@@ -157,7 +157,7 @@ module.exports = function(ctx) {
|
||||
const response = await ctx.fetchT(url, {
|
||||
method: 'GET',
|
||||
signal: controller.signal,
|
||||
redirect: 'follow'
|
||||
redirect: 'follow',
|
||||
});
|
||||
clearTimeout(timeout);
|
||||
|
||||
@@ -168,8 +168,8 @@ module.exports = function(ctx) {
|
||||
status: response.ok || response.status < 500 ? 'healthy' : 'unhealthy',
|
||||
statusCode: response.status,
|
||||
url,
|
||||
checkedAt: new Date().toISOString()
|
||||
}
|
||||
checkedAt: new Date().toISOString(),
|
||||
},
|
||||
});
|
||||
} catch (e) {
|
||||
clearTimeout(timeout);
|
||||
@@ -180,8 +180,8 @@ module.exports = function(ctx) {
|
||||
status: 'unhealthy',
|
||||
reason: e.name === 'AbortError' ? 'Timeout' : e.message,
|
||||
url,
|
||||
checkedAt: new Date().toISOString()
|
||||
}
|
||||
checkedAt: new Date().toISOString(),
|
||||
},
|
||||
});
|
||||
}
|
||||
}, 'health-service'));
|
||||
@@ -201,7 +201,7 @@ module.exports = function(ctx) {
|
||||
return res.json({
|
||||
status: 'error',
|
||||
message: 'Root CA certificate not found',
|
||||
daysUntilExpiration: null
|
||||
daysUntilExpiration: null,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -232,14 +232,14 @@ module.exports = function(ctx) {
|
||||
status: status,
|
||||
message: message,
|
||||
daysUntilExpiration: daysUntilExpiration,
|
||||
expiresAt: notAfter
|
||||
expiresAt: notAfter,
|
||||
});
|
||||
} catch (error) {
|
||||
await ctx.logError('GET /api/health/ca', error);
|
||||
res.json({
|
||||
status: 'error',
|
||||
message: error.message,
|
||||
daysUntilExpiration: null
|
||||
daysUntilExpiration: null,
|
||||
});
|
||||
}
|
||||
}, 'health-ca'));
|
||||
|
||||
@@ -16,7 +16,7 @@ module.exports = function(ctx) {
|
||||
res.json({
|
||||
success: true,
|
||||
message: result.message,
|
||||
license: result.activation
|
||||
license: result.activation,
|
||||
});
|
||||
} else {
|
||||
ctx.errorResponse(res, 400, result.message);
|
||||
@@ -53,8 +53,8 @@ module.exports = function(ctx) {
|
||||
tier: status.tier,
|
||||
...(available ? {} : {
|
||||
upgradeUrl: '/settings#license',
|
||||
message: `${status.premiumFeatures[feature]?.name || feature} requires DashCaddy Premium`
|
||||
})
|
||||
message: `${status.premiumFeatures[feature]?.name || feature} requires DashCaddy Premium`,
|
||||
}),
|
||||
});
|
||||
}, 'license-feature-check'));
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ module.exports = function(ctx) {
|
||||
name: c.Names[0]?.replace(/^\//, '') || 'unknown',
|
||||
image: c.Image,
|
||||
status: c.State,
|
||||
created: c.Created
|
||||
created: c.Created,
|
||||
}));
|
||||
|
||||
const paginationParams = parsePaginationParams(req.query);
|
||||
@@ -46,7 +46,7 @@ module.exports = function(ctx) {
|
||||
|
||||
const logs = await container.logs({
|
||||
stdout: true, stderr: true,
|
||||
tail, since, timestamps
|
||||
tail, since, timestamps,
|
||||
});
|
||||
|
||||
// Parse Docker log stream (demultiplex stdout/stderr)
|
||||
@@ -65,7 +65,7 @@ module.exports = function(ctx) {
|
||||
if (line) {
|
||||
lines.push({
|
||||
stream: streamType === 2 ? 'stderr' : 'stdout',
|
||||
text: line
|
||||
text: line,
|
||||
});
|
||||
}
|
||||
offset += 8 + size;
|
||||
@@ -75,7 +75,7 @@ module.exports = function(ctx) {
|
||||
success: true,
|
||||
containerId, containerName,
|
||||
logs: lines,
|
||||
count: lines.length
|
||||
count: lines.length,
|
||||
});
|
||||
}, 'logs-container'));
|
||||
|
||||
@@ -100,7 +100,7 @@ module.exports = function(ctx) {
|
||||
|
||||
const logStream = await container.logs({
|
||||
stdout: true, stderr: true,
|
||||
follow: true, tail: 50, timestamps: true
|
||||
follow: true, tail: 50, timestamps: true,
|
||||
});
|
||||
|
||||
let buffer = Buffer.alloc(0);
|
||||
@@ -119,7 +119,7 @@ module.exports = function(ctx) {
|
||||
const data = JSON.stringify({
|
||||
stream: streamType === 2 ? 'stderr' : 'stdout',
|
||||
text: line,
|
||||
timestamp: new Date().toISOString()
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
res.write(`data: ${data}\n\n`);
|
||||
}
|
||||
@@ -248,7 +248,7 @@ module.exports = function(ctx) {
|
||||
const logs = tailLines.map(line => ({
|
||||
stream: 'stdout',
|
||||
text: line,
|
||||
timestamp: extractTimestamp(line)
|
||||
timestamp: extractTimestamp(line),
|
||||
}));
|
||||
|
||||
res.json({
|
||||
@@ -256,7 +256,7 @@ module.exports = function(ctx) {
|
||||
logPath: normalizedPath,
|
||||
logs,
|
||||
count: logs.length,
|
||||
totalLines: lines.length
|
||||
totalLines: lines.length,
|
||||
});
|
||||
}, 'logs-file'));
|
||||
|
||||
|
||||
@@ -96,17 +96,17 @@ module.exports = function(ctx) {
|
||||
image: containerInfo.Image,
|
||||
status: containerInfo.State,
|
||||
cpu: {
|
||||
percent: Math.round(cpuPercent * 100) / 100
|
||||
percent: Math.round(cpuPercent * 100) / 100,
|
||||
},
|
||||
memory: {
|
||||
used: memUsage,
|
||||
limit: memLimit,
|
||||
percent: Math.round(memPercent * 100) / 100
|
||||
percent: Math.round(memPercent * 100) / 100,
|
||||
},
|
||||
network: {
|
||||
rx: netRx,
|
||||
tx: netTx
|
||||
}
|
||||
tx: netTx,
|
||||
},
|
||||
});
|
||||
} catch (e) {
|
||||
// Skip containers we can't get stats for
|
||||
@@ -151,15 +151,15 @@ module.exports = function(ctx) {
|
||||
status: info.State.Status,
|
||||
started: info.State.StartedAt,
|
||||
cpu: {
|
||||
percent: Math.round(cpuPercent * 100) / 100
|
||||
percent: Math.round(cpuPercent * 100) / 100,
|
||||
},
|
||||
memory: {
|
||||
used: memUsage,
|
||||
limit: memLimit,
|
||||
percent: Math.round((memUsage / memLimit) * 100 * 100) / 100
|
||||
percent: Math.round((memUsage / memLimit) * 100 * 100) / 100,
|
||||
},
|
||||
network: { rx: netRx, tx: netTx },
|
||||
},
|
||||
network: { rx: netRx, tx: netTx }
|
||||
}
|
||||
});
|
||||
}, 'stats-container'));
|
||||
|
||||
|
||||
@@ -14,20 +14,20 @@ module.exports = function(ctx) {
|
||||
providers: {
|
||||
discord: {
|
||||
enabled: notificationConfig.providers.discord?.enabled || false,
|
||||
configured: !!notificationConfig.providers.discord?.webhookUrl
|
||||
configured: !!notificationConfig.providers.discord?.webhookUrl,
|
||||
},
|
||||
telegram: {
|
||||
enabled: notificationConfig.providers.telegram?.enabled || false,
|
||||
configured: !!(notificationConfig.providers.telegram?.botToken && notificationConfig.providers.telegram?.chatId)
|
||||
configured: !!(notificationConfig.providers.telegram?.botToken && notificationConfig.providers.telegram?.chatId),
|
||||
},
|
||||
ntfy: {
|
||||
enabled: notificationConfig.providers.ntfy?.enabled || false,
|
||||
configured: !!notificationConfig.providers.ntfy?.topic,
|
||||
serverUrl: notificationConfig.providers.ntfy?.serverUrl || 'https://ntfy.sh'
|
||||
}
|
||||
serverUrl: notificationConfig.providers.ntfy?.serverUrl || 'https://ntfy.sh',
|
||||
},
|
||||
},
|
||||
events: notificationConfig.events,
|
||||
healthCheck: notificationConfig.healthCheck
|
||||
healthCheck: notificationConfig.healthCheck,
|
||||
};
|
||||
res.json({ success: true, config: safeConfig });
|
||||
}, 'notifications-config-get'));
|
||||
@@ -78,19 +78,19 @@ module.exports = function(ctx) {
|
||||
if (providers.discord) {
|
||||
notificationConfig.providers.discord = {
|
||||
...notificationConfig.providers.discord,
|
||||
...providers.discord
|
||||
...providers.discord,
|
||||
};
|
||||
}
|
||||
if (providers.telegram) {
|
||||
notificationConfig.providers.telegram = {
|
||||
...notificationConfig.providers.telegram,
|
||||
...providers.telegram
|
||||
...providers.telegram,
|
||||
};
|
||||
}
|
||||
if (providers.ntfy) {
|
||||
notificationConfig.providers.ntfy = {
|
||||
...notificationConfig.providers.ntfy,
|
||||
...providers.ntfy
|
||||
...providers.ntfy,
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -159,7 +159,7 @@ module.exports = function(ctx) {
|
||||
res.json({
|
||||
success: true,
|
||||
history: notificationHistory.slice(0, limit),
|
||||
total: notificationHistory.length
|
||||
total: notificationHistory.length,
|
||||
});
|
||||
}
|
||||
}, 'notifications-history'));
|
||||
@@ -177,7 +177,7 @@ module.exports = function(ctx) {
|
||||
res.json({
|
||||
success: true,
|
||||
lastCheck: notificationConfig.healthCheck.lastCheck,
|
||||
containersMonitored: Object.keys(ctx.notification.getHealthState()).length
|
||||
containersMonitored: Object.keys(ctx.notification.getHealthState()).length,
|
||||
});
|
||||
}, 'notifications-health-check'));
|
||||
|
||||
|
||||
@@ -42,7 +42,7 @@ module.exports = function(ctx) {
|
||||
await ctx.docker.client.createNetwork({
|
||||
Name: networkName,
|
||||
Driver: recipe.network.driver || 'bridge',
|
||||
Labels: { 'sami.managed': 'true', 'sami.recipe': recipeId }
|
||||
Labels: { 'sami.managed': 'true', 'sami.recipe': recipeId },
|
||||
});
|
||||
ctx.log.info('recipe', 'Created Docker network', { networkName });
|
||||
} catch (e) {
|
||||
@@ -62,18 +62,18 @@ module.exports = function(ctx) {
|
||||
try {
|
||||
ctx.log.info('recipe', `Deploying component: ${component.id}`, {
|
||||
role: component.role,
|
||||
internal: component.internal || false
|
||||
internal: component.internal || false,
|
||||
});
|
||||
|
||||
const result = await deployComponent(component, recipe, config, generatedPasswords, networkName);
|
||||
deployedComponents.push(result);
|
||||
|
||||
ctx.log.info('recipe', `Component deployed: ${component.id}`, {
|
||||
containerId: result.containerId?.substring(0, 12)
|
||||
containerId: result.containerId?.substring(0, 12),
|
||||
});
|
||||
} catch (componentError) {
|
||||
ctx.log.error('recipe', `Component failed: ${component.id}`, {
|
||||
error: componentError.message
|
||||
error: componentError.message,
|
||||
});
|
||||
errors.push({ componentId: component.id, role: component.role, error: componentError.message });
|
||||
// Continue deploying other components — partial success is better than total failure
|
||||
@@ -96,7 +96,7 @@ module.exports = function(ctx) {
|
||||
recipeId: recipeId,
|
||||
recipeRole: deployed.role,
|
||||
tailscaleOnly: config.sharedConfig?.tailscaleOnly || false,
|
||||
deployedAt: new Date().toISOString()
|
||||
deployedAt: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -119,18 +119,18 @@ module.exports = function(ctx) {
|
||||
role: c.role,
|
||||
containerId: c.containerId?.substring(0, 12),
|
||||
url: c.url,
|
||||
internal: c.internal
|
||||
internal: c.internal,
|
||||
})),
|
||||
errors: errors.length > 0 ? errors : undefined,
|
||||
message: errors.length > 0
|
||||
? `${recipe.name} partially deployed (${deployedComponents.length}/${componentsToDeploy.length} components)`
|
||||
: `${recipe.name} deployed successfully!`,
|
||||
setupInstructions: recipe.setupInstructions
|
||||
setupInstructions: recipe.setupInstructions,
|
||||
};
|
||||
|
||||
ctx.notification.send('deploymentSuccess', 'Recipe Deployed',
|
||||
`**${recipe.name}** recipe deployed (${deployedComponents.length} components).`,
|
||||
'success'
|
||||
'success',
|
||||
);
|
||||
|
||||
res.json(response);
|
||||
@@ -146,7 +146,7 @@ module.exports = function(ctx) {
|
||||
}
|
||||
} catch (cleanupError) {
|
||||
ctx.log.warn('recipe', 'Cleanup failed for component', {
|
||||
componentId: deployed.id, error: cleanupError.message
|
||||
componentId: deployed.id, error: cleanupError.message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -162,7 +162,7 @@ module.exports = function(ctx) {
|
||||
}
|
||||
|
||||
ctx.notification.send('deploymentFailed', 'Recipe Failed',
|
||||
`Failed to deploy **${recipe.name}**: ${error.message}`, 'error'
|
||||
`Failed to deploy **${recipe.name}**: ${error.message}`, 'error',
|
||||
);
|
||||
|
||||
ctx.errorResponse(res, 500, error.message);
|
||||
@@ -254,7 +254,7 @@ module.exports = function(ctx) {
|
||||
HostConfig: {
|
||||
PortBindings: {},
|
||||
Binds: dockerConfig.volumes || [],
|
||||
RestartPolicy: { Name: 'unless-stopped' }
|
||||
RestartPolicy: { Name: 'unless-stopped' },
|
||||
},
|
||||
Env: Object.entries(dockerConfig.environment || {}).map(([k, v]) => `${k}=${v}`),
|
||||
Labels: {
|
||||
@@ -264,8 +264,8 @@ module.exports = function(ctx) {
|
||||
'sami.recipe.component': component.id,
|
||||
'sami.recipe.role': component.role,
|
||||
'sami.subdomain': subdomain,
|
||||
'sami.deployed': new Date().toISOString()
|
||||
}
|
||||
'sami.deployed': new Date().toISOString(),
|
||||
},
|
||||
};
|
||||
|
||||
// Configure ports
|
||||
@@ -288,7 +288,7 @@ module.exports = function(ctx) {
|
||||
} catch (e) {
|
||||
ctx.log.warn('recipe', `Pull failed, checking local: ${dockerConfig.image}`);
|
||||
const images = await ctx.docker.client.listImages({
|
||||
filters: { reference: [dockerConfig.image] }
|
||||
filters: { reference: [dockerConfig.image] },
|
||||
});
|
||||
if (images.length === 0) throw new Error(`Image not found: ${dockerConfig.image}`);
|
||||
}
|
||||
@@ -324,7 +324,7 @@ module.exports = function(ctx) {
|
||||
const primaryPort = port || dockerConfig.ports[0].split(/[:/]/)[0];
|
||||
const caddyConfig = ctx.caddy.generateConfig(
|
||||
subdomain, hostIp, primaryPort,
|
||||
{ tailscaleOnly: sharedConfig.tailscaleOnly || false }
|
||||
{ tailscaleOnly: sharedConfig.tailscaleOnly || false },
|
||||
);
|
||||
try {
|
||||
const helpers = require('../apps/helpers')(ctx);
|
||||
@@ -344,7 +344,7 @@ module.exports = function(ctx) {
|
||||
internal: component.internal || false,
|
||||
templateRef: component.templateRef,
|
||||
logo,
|
||||
url
|
||||
url,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -29,9 +29,9 @@ module.exports = function(ctx) {
|
||||
required: c.required,
|
||||
internal: c.internal || false,
|
||||
templateRef: c.templateRef || null,
|
||||
note: c.note || null
|
||||
note: c.note || null,
|
||||
})),
|
||||
setupInstructions: recipe.setupInstructions
|
||||
setupInstructions: recipe.setupInstructions,
|
||||
}));
|
||||
|
||||
res.json({ success: true, templates, categories: RECIPE_CATEGORIES });
|
||||
|
||||
@@ -16,7 +16,7 @@ module.exports = function(ctx) {
|
||||
if (!recipeGroups[service.recipeId]) {
|
||||
recipeGroups[service.recipeId] = {
|
||||
recipeId: service.recipeId,
|
||||
components: []
|
||||
components: [],
|
||||
};
|
||||
}
|
||||
recipeGroups[service.recipeId].components.push({
|
||||
@@ -25,7 +25,7 @@ module.exports = function(ctx) {
|
||||
logo: service.logo,
|
||||
containerId: service.containerId,
|
||||
recipeRole: service.recipeRole,
|
||||
deployedAt: service.deployedAt
|
||||
deployedAt: service.deployedAt,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -48,7 +48,7 @@ module.exports = function(ctx) {
|
||||
|
||||
// Check if this container is already listed (by containerId)
|
||||
const existing = recipeGroups[recipeId].components.find(
|
||||
c => c.containerId === containerInfo.Id
|
||||
c => c.containerId === containerInfo.Id,
|
||||
);
|
||||
if (existing) continue;
|
||||
|
||||
@@ -59,7 +59,7 @@ module.exports = function(ctx) {
|
||||
recipeRole: labels['sami.recipe.role'] || 'Unknown',
|
||||
internal: true,
|
||||
state: containerInfo.State,
|
||||
status: containerInfo.Status
|
||||
status: containerInfo.Status,
|
||||
});
|
||||
}
|
||||
} catch (e) {
|
||||
@@ -242,7 +242,7 @@ module.exports = function(ctx) {
|
||||
|
||||
ctx.notification.send('recipeRemoved', 'Recipe Removed',
|
||||
`Removed **${recipeId}** recipe (${results.filter(r => r.status === 'removed').length} containers).`,
|
||||
'info'
|
||||
'info',
|
||||
);
|
||||
|
||||
ctx.log.info('recipe', 'Recipe removed', { recipeId, results });
|
||||
@@ -271,7 +271,7 @@ module.exports = function(ctx) {
|
||||
Id: c.Id,
|
||||
component: c.Labels['sami.recipe.component'] || c.Names[0]?.replace('/', ''),
|
||||
role: c.Labels['sami.recipe.role'] || 'Unknown',
|
||||
state: c.State
|
||||
state: c.State,
|
||||
}));
|
||||
}
|
||||
|
||||
@@ -293,7 +293,7 @@ module.exports = function(ctx) {
|
||||
*/
|
||||
async function removeCaddyBlock(subdomain) {
|
||||
const domain = ctx.buildDomain(subdomain);
|
||||
let content = await ctx.caddy.read();
|
||||
const content = await ctx.caddy.read();
|
||||
|
||||
// Find and remove the block for this domain
|
||||
const escapedDomain = domain.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||
|
||||
@@ -99,7 +99,7 @@ module.exports = function(ctx) {
|
||||
isUp: false,
|
||||
statusCode: 502,
|
||||
responseTime,
|
||||
error: error.message
|
||||
error: error.message,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -108,7 +108,7 @@ module.exports = function(ctx) {
|
||||
isUp: isServiceUp(statusCode),
|
||||
statusCode,
|
||||
responseTime,
|
||||
url
|
||||
url,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -169,7 +169,7 @@ module.exports = function(ctx) {
|
||||
success: true,
|
||||
hasApiKey: !!(arrKey || svcKey),
|
||||
hasBasicAuth: !!username,
|
||||
username: username || null
|
||||
username: username || null,
|
||||
});
|
||||
} catch (error) {
|
||||
res.json({ success: true, hasApiKey: false, hasBasicAuth: false });
|
||||
@@ -249,7 +249,7 @@ module.exports = function(ctx) {
|
||||
services.forEach(service => addId(service.id));
|
||||
|
||||
const statusResults = await mapWithConcurrency(ids, PROBE_CONCURRENCY, (id) =>
|
||||
probeServiceStatus(id, serviceMap.get(id))
|
||||
probeServiceStatus(id, serviceMap.get(id)),
|
||||
);
|
||||
|
||||
const statuses = {};
|
||||
@@ -261,7 +261,7 @@ module.exports = function(ctx) {
|
||||
res.json({
|
||||
success: true,
|
||||
checkedAt: new Date().toISOString(),
|
||||
statuses
|
||||
statuses,
|
||||
});
|
||||
}, 'services-status'));
|
||||
|
||||
@@ -343,7 +343,7 @@ module.exports = function(ctx) {
|
||||
res.json({
|
||||
success: true,
|
||||
message: `Successfully imported ${services.length} services`,
|
||||
count: services.length
|
||||
count: services.length,
|
||||
});
|
||||
}, 'services-import'));
|
||||
|
||||
@@ -396,12 +396,12 @@ module.exports = function(ctx) {
|
||||
const oldDomain = ctx.buildDomain(oldSubdomain);
|
||||
const newDomain = ctx.buildDomain(newSubdomain);
|
||||
|
||||
let content = await ctx.caddy.read();
|
||||
const content = await ctx.caddy.read();
|
||||
|
||||
const escapedOldDomain = oldDomain.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||
const siteBlockRegex = new RegExp(
|
||||
`${escapedOldDomain}\\s*\\{[^{}]*(?:\\{[^{}]*(?:\\{[^{}]*\\}[^{}]*)*\\}[^{}]*)*\\}`,
|
||||
's'
|
||||
's',
|
||||
);
|
||||
|
||||
const oldBlockMatch = content.match(siteBlockRegex);
|
||||
@@ -414,7 +414,7 @@ module.exports = function(ctx) {
|
||||
const finalPort = port || existingPort;
|
||||
|
||||
const newConfig = ctx.caddy.generateConfig(newSubdomain, finalIp, finalPort, {
|
||||
tailscaleOnly: tailscaleOnly || false
|
||||
tailscaleOnly: tailscaleOnly || false,
|
||||
});
|
||||
|
||||
const caddyResult = await ctx.caddy.modify(c => c.replace(siteBlockRegex, newConfig));
|
||||
@@ -445,7 +445,7 @@ module.exports = function(ctx) {
|
||||
id: newSubdomain,
|
||||
port: port || services[serviceIndex].port,
|
||||
ip: ip || services[serviceIndex].ip,
|
||||
tailscaleOnly: tailscaleOnly || false
|
||||
tailscaleOnly: tailscaleOnly || false,
|
||||
};
|
||||
results.services = 'updated';
|
||||
} else {
|
||||
@@ -459,7 +459,7 @@ module.exports = function(ctx) {
|
||||
res.json({
|
||||
success: true,
|
||||
message: `Service updated: ${oldSubdomain} -> ${newSubdomain}`,
|
||||
results
|
||||
results,
|
||||
});
|
||||
}, 'services-update'));
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ module.exports = function(ctx) {
|
||||
const response = await ctx.fetchT(`${ctx.caddy.adminUrl}/load`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': CADDY.CONTENT_TYPE },
|
||||
body: caddyfileContent
|
||||
body: caddyfileContent,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
@@ -110,7 +110,7 @@ module.exports = function(ctx) {
|
||||
const caList = cas.map(ca => ({
|
||||
id: ca.id || ca.name,
|
||||
name: ca.name,
|
||||
displayName: ca.name !== (ca.id || ca.name) ? `${ca.name} (${ca.id || ca.name})` : ca.name
|
||||
displayName: ca.name !== (ca.id || ca.name) ? `${ca.name} (${ca.id || ca.name})` : ca.name,
|
||||
}));
|
||||
res.json({ status: 'success', data: { cas: caList } });
|
||||
}, 'caddy-get-cas'));
|
||||
@@ -123,7 +123,7 @@ module.exports = function(ctx) {
|
||||
const result = await ctx.caddy.modify((content) => {
|
||||
const escapedDomain = domain.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||
const siteBlockRegex = new RegExp(
|
||||
`\\n?${escapedDomain}\\s*\\{[^{}]*(?:\\{[^{}]*(?:\\{[^{}]*\\}[^{}]*)*\\}[^{}]*)*\\}\\s*`, 'g'
|
||||
`\\n?${escapedDomain}\\s*\\{[^{}]*(?:\\{[^{}]*(?:\\{[^{}]*\\}[^{}]*)*\\}[^{}]*)*\\}\\s*`, 'g',
|
||||
);
|
||||
const modified = content.replace(siteBlockRegex, '\n');
|
||||
if (modified.length === content.length) return null;
|
||||
@@ -149,7 +149,7 @@ module.exports = function(ctx) {
|
||||
const upstreamRegex = /^[a-z0-9.-]+:\d{1,5}$/i;
|
||||
if (!upstreamRegex.test(upstream)) return ctx.errorResponse(res, 400, 'Invalid upstream format. Use host:port');
|
||||
|
||||
let content = await ctx.caddy.read();
|
||||
const content = await ctx.caddy.read();
|
||||
const escapedDomain = domain.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||
const siteBlockRegex = new RegExp(`\\n?${escapedDomain}\\s*\\{`, 'g');
|
||||
if (siteBlockRegex.test(content)) {
|
||||
@@ -200,7 +200,7 @@ module.exports = function(ctx) {
|
||||
}
|
||||
|
||||
const sslConfig = sslType === 'letsencrypt' ? '' : 'tls internal';
|
||||
const hostHeader = preserveHost ? `\n header_up Host {upstream_hostport}` : '';
|
||||
const hostHeader = preserveHost ? '\n header_up Host {upstream_hostport}' : '';
|
||||
|
||||
const urlObj = new URL(externalUrl);
|
||||
|
||||
@@ -238,7 +238,7 @@ module.exports = function(ctx) {
|
||||
await ctx.addServiceToConfig({
|
||||
id: subdomain, name: serviceName, logo,
|
||||
isExternal: true, externalUrl,
|
||||
deployedAt: new Date().toISOString()
|
||||
deployedAt: new Date().toISOString(),
|
||||
});
|
||||
ctx.log.info('deploy', 'Service added to dashboard', { subdomain });
|
||||
} catch (serviceError) {
|
||||
@@ -248,7 +248,7 @@ module.exports = function(ctx) {
|
||||
|
||||
const response = {
|
||||
success: true,
|
||||
message: `External service proxy for ${domain} -> ${externalUrl} created${shouldReload ? ' and Caddy reloaded' : ''}`
|
||||
message: `External service proxy for ${domain} -> ${externalUrl} created${shouldReload ? ' and Caddy reloaded' : ''}`,
|
||||
};
|
||||
if (dnsWarning) response.warning = dnsWarning;
|
||||
res.json(response);
|
||||
|
||||
@@ -16,7 +16,7 @@ module.exports = function(ctx) {
|
||||
success: true,
|
||||
installed: false,
|
||||
connected: false,
|
||||
message: 'Tailscale not available or not running'
|
||||
message: 'Tailscale not available or not running',
|
||||
});
|
||||
}
|
||||
|
||||
@@ -30,7 +30,7 @@ module.exports = function(ctx) {
|
||||
os: peer.OS,
|
||||
online: peer.Online,
|
||||
lastSeen: peer.LastSeen,
|
||||
user: peer.UserID
|
||||
user: peer.UserID,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -44,11 +44,11 @@ module.exports = function(ctx) {
|
||||
hostname: status.Self?.HostName,
|
||||
ip: localIP,
|
||||
tailnetName: status.MagicDNSSuffix,
|
||||
online: status.Self?.Online
|
||||
online: status.Self?.Online,
|
||||
},
|
||||
config: ctx.tailscale.config,
|
||||
devices,
|
||||
deviceCount: devices.length
|
||||
deviceCount: devices.length,
|
||||
});
|
||||
}, 'tailscale-status'));
|
||||
|
||||
@@ -65,7 +65,7 @@ module.exports = function(ctx) {
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Tailscale configuration updated',
|
||||
config: ctx.tailscale.config
|
||||
config: ctx.tailscale.config,
|
||||
});
|
||||
}, 'tailscale-config'));
|
||||
|
||||
@@ -83,7 +83,7 @@ module.exports = function(ctx) {
|
||||
isTailscale,
|
||||
clientIP,
|
||||
forwardedFor: forwardedFor || null,
|
||||
realIP: realIP || null
|
||||
realIP: realIP || null,
|
||||
});
|
||||
}, 'tailscale-check'));
|
||||
|
||||
@@ -102,7 +102,7 @@ module.exports = function(ctx) {
|
||||
hostname: peer.HostName,
|
||||
ip: peer.TailscaleIPs?.[0],
|
||||
os: peer.OS,
|
||||
user: peer.UserID
|
||||
user: peer.UserID,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -114,7 +114,7 @@ module.exports = function(ctx) {
|
||||
ip: status.Self.TailscaleIPs?.[0],
|
||||
os: status.Self.OS,
|
||||
user: status.Self.UserID,
|
||||
isSelf: true
|
||||
isSelf: true,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -129,7 +129,7 @@ module.exports = function(ctx) {
|
||||
return ctx.errorResponse(res, 400, 'subdomain is required');
|
||||
}
|
||||
|
||||
let content = await ctx.caddy.read();
|
||||
const content = await ctx.caddy.read();
|
||||
const domain = ctx.buildDomain(subdomain);
|
||||
|
||||
const blockRegex = new RegExp(`(${domain.replace('.', '\\.')}\\s*\\{[^}]*\\})`, 's');
|
||||
@@ -149,7 +149,7 @@ module.exports = function(ctx) {
|
||||
|
||||
const newConfig = ctx.caddy.generateConfig(subdomain, ip, port || '80', {
|
||||
tailscaleOnly: tailscaleOnly !== false,
|
||||
allowedIPs: allowedIPs || []
|
||||
allowedIPs: allowedIPs || [],
|
||||
});
|
||||
|
||||
const caddyResult = await ctx.caddy.modify(c => c.replace(blockRegex, newConfig));
|
||||
@@ -170,7 +170,7 @@ module.exports = function(ctx) {
|
||||
res.json({
|
||||
success: true,
|
||||
message: `Service ${domain} is now ${tailscaleOnly !== false ? 'protected by' : 'no longer restricted to'} Tailscale`,
|
||||
tailscaleOnly: tailscaleOnly !== false
|
||||
tailscaleOnly: tailscaleOnly !== false,
|
||||
});
|
||||
}, 'tailscale-protect'));
|
||||
|
||||
@@ -188,7 +188,7 @@ module.exports = function(ctx) {
|
||||
const tokenRes = await fetch(TAILSCALE.OAUTH_TOKEN_URL, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
|
||||
body: `client_id=${encodeURIComponent(clientId)}&client_secret=${encodeURIComponent(clientSecret)}&grant_type=client_credentials`
|
||||
body: `client_id=${encodeURIComponent(clientId)}&client_secret=${encodeURIComponent(clientSecret)}&grant_type=client_credentials`,
|
||||
});
|
||||
|
||||
if (!tokenRes.ok) {
|
||||
@@ -199,7 +199,7 @@ module.exports = function(ctx) {
|
||||
|
||||
// Test with the device list to verify scopes
|
||||
const testRes = await fetch(`${TAILSCALE.API_BASE}/tailnet/${encodeURIComponent(tailnet)}/devices`, {
|
||||
headers: { Authorization: `Bearer ${tokenData.access_token}` }
|
||||
headers: { Authorization: `Bearer ${tokenData.access_token}` },
|
||||
});
|
||||
|
||||
if (!testRes.ok) {
|
||||
@@ -259,7 +259,7 @@ module.exports = function(ctx) {
|
||||
res.json({
|
||||
success: true,
|
||||
devices: ctx.tailscale.config.devices || [],
|
||||
lastSync: ctx.tailscale.config.lastSync
|
||||
lastSync: ctx.tailscale.config.lastSync,
|
||||
});
|
||||
}, 'tailscale-api-devices'));
|
||||
|
||||
@@ -274,7 +274,7 @@ module.exports = function(ctx) {
|
||||
res.json({
|
||||
success: true,
|
||||
devices: devices || [],
|
||||
lastSync: ctx.tailscale.config.lastSync
|
||||
lastSync: ctx.tailscale.config.lastSync,
|
||||
});
|
||||
}, 'tailscale-sync'));
|
||||
|
||||
@@ -287,7 +287,7 @@ module.exports = function(ctx) {
|
||||
}
|
||||
|
||||
const aclRes = await fetch(`${TAILSCALE.API_BASE}/tailnet/${encodeURIComponent(tailnet)}/acl`, {
|
||||
headers: { Authorization: `Bearer ${token}`, Accept: 'application/json' }
|
||||
headers: { Authorization: `Bearer ${token}`, Accept: 'application/json' },
|
||||
});
|
||||
if (!aclRes.ok) {
|
||||
return ctx.errorResponse(res, aclRes.status, `ACL fetch failed: HTTP ${aclRes.status}`);
|
||||
@@ -299,7 +299,7 @@ module.exports = function(ctx) {
|
||||
groups: Object.keys(acl.groups || {}),
|
||||
tagOwners: Object.keys(acl.tagOwners || {}),
|
||||
aclRuleCount: (acl.acls || []).length,
|
||||
sshRuleCount: (acl.ssh || []).length
|
||||
sshRuleCount: (acl.ssh || []).length,
|
||||
};
|
||||
|
||||
res.json({ success: true, acl, summary });
|
||||
|
||||
@@ -46,15 +46,15 @@ module.exports = function(ctx) {
|
||||
|
||||
const themeData = { name, ...colors };
|
||||
if (lightBg) themeData.lightBg = true;
|
||||
fs.writeFileSync(path.join(THEMES_DIR, slug + '.json'), JSON.stringify(themeData, null, 2), 'utf8');
|
||||
fs.writeFileSync(path.join(THEMES_DIR, `${slug }.json`), JSON.stringify(themeData, null, 2), 'utf8');
|
||||
|
||||
res.json({ success: true, message: name + ' theme saved' });
|
||||
res.json({ success: true, message: `${name } theme saved` });
|
||||
});
|
||||
|
||||
// Delete a theme
|
||||
router.delete('/themes/:slug', (req, res) => {
|
||||
const { slug } = req.params;
|
||||
const filePath = path.join(THEMES_DIR, slug + '.json');
|
||||
const filePath = path.join(THEMES_DIR, `${slug }.json`);
|
||||
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return res.status(404).json({ success: false, error: 'Theme not found' });
|
||||
@@ -64,7 +64,7 @@ module.exports = function(ctx) {
|
||||
const name = data.name || slug;
|
||||
fs.unlinkSync(filePath);
|
||||
|
||||
res.json({ success: true, message: name + ' theme deleted' });
|
||||
res.json({ success: true, message: `${name } theme deleted` });
|
||||
});
|
||||
|
||||
return router;
|
||||
|
||||
@@ -31,7 +31,7 @@ let buildRunning = false;
|
||||
function log(msg) {
|
||||
const line = `[webhook] ${new Date().toISOString()} ${msg}`;
|
||||
console.log(line);
|
||||
fs.appendFileSync(LOG_FILE, line + '\n');
|
||||
fs.appendFileSync(LOG_FILE, `${line }\n`);
|
||||
}
|
||||
|
||||
function verifySignature(body, signature) {
|
||||
@@ -39,7 +39,7 @@ function verifySignature(body, signature) {
|
||||
const hmac = crypto.createHmac('sha256', SECRET).update(body).digest('hex');
|
||||
return crypto.timingSafeEqual(
|
||||
Buffer.from(signature),
|
||||
Buffer.from(hmac)
|
||||
Buffer.from(hmac),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -124,7 +124,7 @@ const server = http.createServer((req, res) => {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ accepted: true }));
|
||||
} catch (e) {
|
||||
log('Failed to parse webhook payload: ' + e.message);
|
||||
log(`Failed to parse webhook payload: ${ e.message}`);
|
||||
res.writeHead(400);
|
||||
res.end('Invalid payload');
|
||||
}
|
||||
|
||||
@@ -185,7 +185,7 @@ class SelfUpdater extends EventEmitter {
|
||||
const frontendSrc = this._findDir(stagingDir, 'status');
|
||||
if (frontendSrc) {
|
||||
await this._copyDir(frontendSrc, this.config.frontendDir, [
|
||||
'dist', 'css', 'assets', 'vendor', 'index.html', 'sw.js'
|
||||
'dist', 'css', 'assets', 'vendor', 'index.html', 'sw.js',
|
||||
]);
|
||||
this.emit('update-progress', { step: 'frontend-updated', version: remoteInfo.version });
|
||||
}
|
||||
@@ -209,7 +209,7 @@ class SelfUpdater extends EventEmitter {
|
||||
};
|
||||
await fsp.writeFile(
|
||||
path.join(this.config.updatesDir, 'trigger.json'),
|
||||
JSON.stringify(trigger, null, 2)
|
||||
JSON.stringify(trigger, null, 2),
|
||||
);
|
||||
|
||||
// The host-side systemd service will handle the rest.
|
||||
@@ -312,7 +312,7 @@ class SelfUpdater extends EventEmitter {
|
||||
this.status = 'waiting';
|
||||
await fsp.writeFile(
|
||||
path.join(this.config.updatesDir, 'trigger.json'),
|
||||
JSON.stringify(trigger, null, 2)
|
||||
JSON.stringify(trigger, null, 2),
|
||||
);
|
||||
|
||||
this._addToHistory({
|
||||
@@ -412,12 +412,12 @@ class SelfUpdater extends EventEmitter {
|
||||
try {
|
||||
resolve(JSON.parse(data));
|
||||
} catch (e) {
|
||||
reject(new Error('Invalid JSON from ' + url));
|
||||
reject(new Error(`Invalid JSON from ${ url}`));
|
||||
}
|
||||
});
|
||||
});
|
||||
req.on('error', reject);
|
||||
req.on('timeout', () => { req.destroy(); reject(new Error('Timeout fetching ' + url)); });
|
||||
req.on('timeout', () => { req.destroy(); reject(new Error(`Timeout fetching ${ url}`)); });
|
||||
});
|
||||
}
|
||||
|
||||
@@ -459,7 +459,7 @@ class SelfUpdater extends EventEmitter {
|
||||
try {
|
||||
execSync(`tar xzf "${tarballPath}" -C "${destDir}" --strip-components=1`, { stdio: 'pipe' });
|
||||
} catch (e) {
|
||||
throw new Error('Failed to extract tarball: ' + e.message);
|
||||
throw new Error(`Failed to extract tarball: ${ e.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ const { execSync } = require('child_process');
|
||||
const path = require('path');
|
||||
const {
|
||||
ValidationError, validateFilePath, validateURL, validateToken,
|
||||
validateServiceConfig, sanitizeString, isValidPort, validateSecurePath
|
||||
validateServiceConfig, sanitizeString, isValidPort, validateSecurePath,
|
||||
} = require('./input-validator');
|
||||
const validatorLib = require('validator');
|
||||
const credentialManager = require('./credential-manager');
|
||||
@@ -128,7 +128,7 @@ licenseManager.loadSecret(LICENSE_SECRET_FILE);
|
||||
// ===== Site configuration loaded from config.json (#5) =====
|
||||
// These are read at startup and refreshed on config save.
|
||||
// All code should use these instead of hardcoded values.
|
||||
let siteConfig = { tld: '.home', caName: '', dnsServerIp: '', dnsServerPort: CADDY.DEFAULT_DNS_PORT, dashboardHost: '', timezone: 'UTC', dnsServers: {}, configurationType: 'homelab', domain: '', routingMode: 'subdomain' };
|
||||
const siteConfig = { tld: '.home', caName: '', dnsServerIp: '', dnsServerPort: CADDY.DEFAULT_DNS_PORT, dashboardHost: '', timezone: 'UTC', dnsServers: {}, configurationType: 'homelab', domain: '', routingMode: 'subdomain' };
|
||||
|
||||
function loadSiteConfig() {
|
||||
try {
|
||||
@@ -147,7 +147,7 @@ function loadSiteConfig() {
|
||||
}
|
||||
|
||||
siteConfig.tld = raw.tld || '.home';
|
||||
if (!siteConfig.tld.startsWith('.')) siteConfig.tld = '.' + siteConfig.tld;
|
||||
if (!siteConfig.tld.startsWith('.')) siteConfig.tld = `.${ siteConfig.tld}`;
|
||||
siteConfig.caName = raw.caName || '';
|
||||
siteConfig.dnsServerIp = (raw.dns && raw.dns.ip) || '';
|
||||
siteConfig.dnsServerPort = (raw.dns && raw.dns.port) || CADDY.DEFAULT_DNS_PORT;
|
||||
@@ -199,7 +199,7 @@ async function callDns(server, apiPath, params) {
|
||||
const response = await fetchT(url, {
|
||||
method: 'GET',
|
||||
headers: { 'Accept': 'application/json' },
|
||||
agent: httpsAgent
|
||||
agent: httpsAgent,
|
||||
}, TIMEOUTS.HTTP_LONG);
|
||||
return response.json();
|
||||
}
|
||||
@@ -323,7 +323,7 @@ async function getServiceById(serviceId) {
|
||||
async function findContainerByName(name, opts = { all: false }) {
|
||||
const containers = await docker.listContainers(opts);
|
||||
const match = containers.find(c =>
|
||||
c.Names.some(n => n.toLowerCase().includes(name.toLowerCase()))
|
||||
c.Names.some(n => n.toLowerCase().includes(name.toLowerCase())),
|
||||
);
|
||||
return match || null;
|
||||
}
|
||||
@@ -348,7 +348,7 @@ async function requireDnsToken(providedToken) {
|
||||
if (providedToken) return providedToken;
|
||||
const result = await ensureValidDnsToken();
|
||||
if (result.success) return result.token;
|
||||
const err = new Error('No valid DNS token available. ' + result.error);
|
||||
const err = new Error(`No valid DNS token available. ${ result.error}`);
|
||||
err.statusCode = 401;
|
||||
throw err;
|
||||
}
|
||||
@@ -430,9 +430,9 @@ async function logError(context, error, additionalInfo = {}) {
|
||||
error: {
|
||||
message: error.message || error,
|
||||
stack: error.stack,
|
||||
code: error.code
|
||||
code: error.code,
|
||||
},
|
||||
...additionalInfo
|
||||
...additionalInfo,
|
||||
};
|
||||
|
||||
// Format log line with request context
|
||||
@@ -446,7 +446,7 @@ async function logError(context, error, additionalInfo = {}) {
|
||||
try {
|
||||
const stats = await fsp.stat(ERROR_LOG_FILE);
|
||||
if (stats.size > MAX_ERROR_LOG_SIZE) {
|
||||
const rotated = ERROR_LOG_FILE + '.1';
|
||||
const rotated = `${ERROR_LOG_FILE }.1`;
|
||||
if (await exists(rotated)) await fsp.unlink(rotated);
|
||||
await fsp.rename(ERROR_LOG_FILE, rotated);
|
||||
}
|
||||
@@ -519,7 +519,7 @@ let tailscaleConfig = {
|
||||
oauthConfigured: false, // true when OAuth credentials are stored
|
||||
tailnet: null, // tailnet name for API calls (e.g., "example.com" or "-")
|
||||
syncInterval: 300, // seconds between API syncs (default 5 min)
|
||||
lastSync: null // ISO timestamp of last successful sync
|
||||
lastSync: null, // ISO timestamp of last successful sync
|
||||
};
|
||||
|
||||
// Load Tailscale config from file
|
||||
@@ -605,7 +605,7 @@ async function getTailscaleAccessToken() {
|
||||
const res = await fetch(TAILSCALE.OAUTH_TOKEN_URL, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
|
||||
body: `client_id=${encodeURIComponent(clientId)}&client_secret=${encodeURIComponent(clientSecret)}&grant_type=client_credentials`
|
||||
body: `client_id=${encodeURIComponent(clientId)}&client_secret=${encodeURIComponent(clientSecret)}&grant_type=client_credentials`,
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
@@ -617,7 +617,7 @@ async function getTailscaleAccessToken() {
|
||||
const data = await res.json();
|
||||
_tsTokenCache = {
|
||||
token: data.access_token,
|
||||
expiresAt: Date.now() + (data.expires_in || 3600) * 1000
|
||||
expiresAt: Date.now() + (data.expires_in || 3600) * 1000,
|
||||
};
|
||||
return data.access_token;
|
||||
}
|
||||
@@ -629,7 +629,7 @@ async function syncFromTailscaleAPI() {
|
||||
if (!token || !tailnet) return null;
|
||||
|
||||
const res = await fetch(`${TAILSCALE.API_BASE}/tailnet/${encodeURIComponent(tailnet)}/devices`, {
|
||||
headers: { Authorization: `Bearer ${token}` }
|
||||
headers: { Authorization: `Bearer ${token}` },
|
||||
});
|
||||
if (!res.ok) throw new Error(`Tailscale API: HTTP ${res.status}`);
|
||||
|
||||
@@ -647,7 +647,7 @@ async function syncFromTailscaleAPI() {
|
||||
tags: d.tags || [],
|
||||
lastSeen: d.lastSeen,
|
||||
clientVersion: d.clientVersion,
|
||||
isExternal: d.isExternal || false
|
||||
isExternal: d.isExternal || false,
|
||||
}));
|
||||
|
||||
tailscaleConfig.devices = devices;
|
||||
@@ -670,7 +670,7 @@ function startTailscaleSyncTimer() {
|
||||
log.warn('tailscale', 'API sync failed', { error: error.message });
|
||||
}
|
||||
}, interval);
|
||||
log.info('tailscale', 'API sync enabled', { interval: interval / 1000 + 's' });
|
||||
log.info('tailscale', 'API sync enabled', { interval: `${interval / 1000 }s` });
|
||||
}
|
||||
|
||||
function stopTailscaleSyncTimer() {
|
||||
@@ -681,10 +681,10 @@ function stopTailscaleSyncTimer() {
|
||||
}
|
||||
|
||||
// TOTP authentication configuration
|
||||
let totpConfig = {
|
||||
const totpConfig = {
|
||||
enabled: false,
|
||||
sessionDuration: 'never', // 'never' = disabled, or '15m','30m','1h','2h','4h','8h','12h','24h'
|
||||
isSetUp: false // true once a secret has been verified
|
||||
isSetUp: false, // true once a secret has been verified
|
||||
};
|
||||
|
||||
async function loadTotpConfig() {
|
||||
@@ -725,20 +725,20 @@ let notificationConfig = {
|
||||
providers: {
|
||||
discord: { enabled: false, webhookUrl: '' },
|
||||
telegram: { enabled: false, botToken: '', chatId: '' },
|
||||
ntfy: { enabled: false, serverUrl: 'https://ntfy.sh', topic: '' }
|
||||
ntfy: { enabled: false, serverUrl: 'https://ntfy.sh', topic: '' },
|
||||
},
|
||||
events: {
|
||||
containerDown: true,
|
||||
containerUp: true,
|
||||
deploymentSuccess: true,
|
||||
deploymentFailed: true,
|
||||
serviceError: true
|
||||
serviceError: true,
|
||||
},
|
||||
healthCheck: {
|
||||
enabled: false,
|
||||
intervalMinutes: 5,
|
||||
lastCheck: null
|
||||
}
|
||||
lastCheck: null,
|
||||
},
|
||||
};
|
||||
|
||||
// Notification history (in-memory, last 100 entries)
|
||||
@@ -801,7 +801,7 @@ async function saveNotificationConfig() {
|
||||
function addNotificationToHistory(notification) {
|
||||
notificationHistory.unshift({
|
||||
...notification,
|
||||
timestamp: new Date().toISOString()
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
if (notificationHistory.length > MAX_NOTIFICATION_HISTORY) {
|
||||
notificationHistory = notificationHistory.slice(0, MAX_NOTIFICATION_HISTORY);
|
||||
@@ -817,7 +817,7 @@ async function sendDiscordNotification(title, message, type = 'info') {
|
||||
success: 0x00ff00, // Green
|
||||
error: 0xff0000, // Red
|
||||
warning: 0xffff00, // Yellow
|
||||
info: 0x0099ff // Blue
|
||||
info: 0x0099ff, // Blue
|
||||
};
|
||||
|
||||
const payload = {
|
||||
@@ -826,15 +826,15 @@ async function sendDiscordNotification(title, message, type = 'info') {
|
||||
description: message,
|
||||
color: colors[type] || colors.info,
|
||||
timestamp: new Date().toISOString(),
|
||||
footer: { text: 'DashCaddy Notifications' }
|
||||
}]
|
||||
footer: { text: 'DashCaddy Notifications' },
|
||||
}],
|
||||
};
|
||||
|
||||
try {
|
||||
const response = await fetchT(webhookUrl, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(payload)
|
||||
body: JSON.stringify(payload),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
@@ -857,7 +857,7 @@ async function sendTelegramNotification(title, message, type = 'info') {
|
||||
success: '✅',
|
||||
error: '❌',
|
||||
warning: '⚠️',
|
||||
info: 'ℹ️'
|
||||
info: 'ℹ️',
|
||||
};
|
||||
|
||||
const text = `${emoji[type] || emoji.info} *DashCaddy: ${title}*\n\n${message}`;
|
||||
@@ -869,8 +869,8 @@ async function sendTelegramNotification(title, message, type = 'info') {
|
||||
body: JSON.stringify({
|
||||
chat_id: chatId,
|
||||
text: text,
|
||||
parse_mode: 'Markdown'
|
||||
})
|
||||
parse_mode: 'Markdown',
|
||||
}),
|
||||
});
|
||||
|
||||
const result = await response.json();
|
||||
@@ -894,14 +894,14 @@ async function sendNtfyNotification(title, message, type = 'info') {
|
||||
success: 3, // default
|
||||
error: 5, // max
|
||||
warning: 4, // high
|
||||
info: 3 // default
|
||||
info: 3, // default
|
||||
};
|
||||
|
||||
const tags = {
|
||||
success: 'white_check_mark',
|
||||
error: 'x',
|
||||
warning: 'warning',
|
||||
info: 'information_source'
|
||||
info: 'information_source',
|
||||
};
|
||||
|
||||
try {
|
||||
@@ -910,9 +910,9 @@ async function sendNtfyNotification(title, message, type = 'info') {
|
||||
headers: {
|
||||
'Title': `DashCaddy: ${title}`,
|
||||
'Priority': String(priority[type] || 3),
|
||||
'Tags': tags[type] || 'information_source'
|
||||
'Tags': tags[type] || 'information_source',
|
||||
},
|
||||
body: message
|
||||
body: message,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
@@ -958,14 +958,14 @@ async function sendNotification(event, title, message, type = 'info') {
|
||||
title,
|
||||
message,
|
||||
type,
|
||||
results
|
||||
results,
|
||||
});
|
||||
|
||||
return { sent: true, results };
|
||||
}
|
||||
|
||||
// Container health monitoring state
|
||||
let containerHealthState = {};
|
||||
const containerHealthState = {};
|
||||
let healthCheckInterval = null;
|
||||
|
||||
// Check container health and send notifications
|
||||
@@ -1003,7 +1003,7 @@ async function checkContainerHealth() {
|
||||
'containerUp',
|
||||
'Container Recovered',
|
||||
`**${serviceName}** is now running again.`,
|
||||
'success'
|
||||
'success',
|
||||
);
|
||||
} else {
|
||||
// Container went down
|
||||
@@ -1011,7 +1011,7 @@ async function checkContainerHealth() {
|
||||
'containerDown',
|
||||
'Container Down',
|
||||
`**${serviceName}** has stopped running.\nStatus: ${container.Status}`,
|
||||
'error'
|
||||
'error',
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1082,13 +1082,13 @@ const middlewareResult = configureMiddleware(app, {
|
||||
siteConfig, totpConfig, tailscaleConfig,
|
||||
metrics, auditLogger, authManager, log, cryptoUtils,
|
||||
isValidContainerId, isTailscaleIP, getTailscaleStatus,
|
||||
RATE_LIMITS, LIMITS, APP, CACHE_CONFIGS, createCache
|
||||
RATE_LIMITS, LIMITS, APP, CACHE_CONFIGS, createCache,
|
||||
});
|
||||
|
||||
const {
|
||||
strictLimiter, SESSION_DURATIONS, ipSessions,
|
||||
getClientIP, createIPSession, setSessionCookie,
|
||||
clearIPSession, clearSessionCookie, isSessionValid
|
||||
clearIPSession, clearSessionCookie, isSessionValid,
|
||||
} = middlewareResult;
|
||||
|
||||
// ── Populate route context and mount extracted route modules ──
|
||||
@@ -1280,7 +1280,7 @@ app.get('/probe/:id', asyncHandler(async (req, res) => {
|
||||
const fReq = fLib.request({
|
||||
hostname: fp.hostname, port: 443, path: '/', method: 'GET',
|
||||
timeout: 5000, agent: httpsAgent,
|
||||
headers: { 'User-Agent': APP.USER_AGENTS.PROBE }
|
||||
headers: { 'User-Agent': APP.USER_AGENTS.PROBE },
|
||||
}, (fRes) => { fRes.resume(); resolve(fRes.statusCode); });
|
||||
fReq.on('error', reject);
|
||||
fReq.on('timeout', () => { fReq.destroy(); reject(new Error('Timeout')); });
|
||||
@@ -1305,7 +1305,7 @@ app.get('/api/network/ips', (req, res) => {
|
||||
localhost: '127.0.0.1',
|
||||
lan: envLan || null,
|
||||
tailscale: envTailscale || null,
|
||||
all: []
|
||||
all: [],
|
||||
};
|
||||
|
||||
// If env vars not set, try to detect from network interfaces
|
||||
@@ -1364,7 +1364,7 @@ async function refreshDnsToken(username, password, server) {
|
||||
const params = new URLSearchParams({
|
||||
user: username,
|
||||
pass: password,
|
||||
includeInfo: 'false'
|
||||
includeInfo: 'false',
|
||||
});
|
||||
|
||||
const response = await fetchT(
|
||||
@@ -1373,10 +1373,10 @@ async function refreshDnsToken(username, password, server) {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'Content-Type': 'application/x-www-form-urlencoded'
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
},
|
||||
timeout: 10000,
|
||||
},
|
||||
timeout: 10000
|
||||
}
|
||||
);
|
||||
|
||||
const result = await response.json();
|
||||
@@ -1436,7 +1436,7 @@ async function ensureValidDnsToken() {
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: 'No DNS credentials configured. Please set up credentials via /api/dns/credentials'
|
||||
error: 'No DNS credentials configured. Please set up credentials via /api/dns/credentials',
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1466,7 +1466,7 @@ async function getTokenForServer(targetServer, role = 'readonly') {
|
||||
const params = new URLSearchParams({
|
||||
user: username,
|
||||
pass: password,
|
||||
includeInfo: 'false'
|
||||
includeInfo: 'false',
|
||||
});
|
||||
|
||||
const response = await fetchT(
|
||||
@@ -1475,9 +1475,9 @@ async function getTokenForServer(targetServer, role = 'readonly') {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'Content-Type': 'application/x-www-form-urlencoded'
|
||||
}
|
||||
}
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const result = await response.json();
|
||||
@@ -1485,7 +1485,7 @@ async function getTokenForServer(targetServer, role = 'readonly') {
|
||||
if (result.status === 'ok' && result.token) {
|
||||
dnsServerTokens.set(cacheKey, {
|
||||
token: result.token,
|
||||
expiry: new Date(Date.now() + SESSION_TTL.DNS_TOKEN).toISOString()
|
||||
expiry: new Date(Date.now() + SESSION_TTL.DNS_TOKEN).toISOString(),
|
||||
});
|
||||
log.info('dns', 'DNS token obtained for server', { server: targetServer, role });
|
||||
return { success: true, token: result.token };
|
||||
@@ -1575,13 +1575,13 @@ function generateCaddyConfig(subdomain, ip, port, options = {}) {
|
||||
}
|
||||
|
||||
if (tailscaleOnly) {
|
||||
config += `\t\t@blocked not remote_ip 100.64.0.0/10`;
|
||||
config += '\t\t@blocked not remote_ip 100.64.0.0/10';
|
||||
if (allowedIPs.length > 0) config += ` ${allowedIPs.join(' ')}`;
|
||||
config += `\n\t\trespond @blocked "Access denied. Tailscale connection required." 403\n`;
|
||||
config += '\n\t\trespond @blocked "Access denied. Tailscale connection required." 403\n';
|
||||
}
|
||||
|
||||
config += `\t\treverse_proxy ${ip}:${port}\n`;
|
||||
config += `\t}`;
|
||||
config += '\t}';
|
||||
return config;
|
||||
}
|
||||
|
||||
@@ -1589,16 +1589,16 @@ function generateCaddyConfig(subdomain, ip, port, options = {}) {
|
||||
let config = `${buildDomain(subdomain)} {\n`;
|
||||
|
||||
if (tailscaleOnly) {
|
||||
config += ` @blocked not remote_ip 100.64.0.0/10`;
|
||||
config += ' @blocked not remote_ip 100.64.0.0/10';
|
||||
if (allowedIPs.length > 0) {
|
||||
config += ` ${allowedIPs.join(' ')}`;
|
||||
}
|
||||
config += `\n respond @blocked "Access denied. Tailscale connection required." 403\n`;
|
||||
config += '\n respond @blocked "Access denied. Tailscale connection required." 403\n';
|
||||
}
|
||||
|
||||
config += ` reverse_proxy ${ip}:${port}\n`;
|
||||
config += ` tls internal\n`;
|
||||
config += `}`;
|
||||
config += ' tls internal\n';
|
||||
config += '}';
|
||||
|
||||
return config;
|
||||
}
|
||||
@@ -1614,7 +1614,7 @@ async function reloadCaddy(content) {
|
||||
const response = await fetchT(`${CADDY_ADMIN_URL}/load`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': CADDY.CONTENT_TYPE },
|
||||
body: content
|
||||
body: content,
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
@@ -1648,7 +1648,7 @@ async function verifySiteAccessible(domain, maxAttempts = 5) {
|
||||
const response = await fetchT(`https://${domain}/`, {
|
||||
method: 'HEAD',
|
||||
agent: httpsAgent, // Ignore cert errors for internal CA
|
||||
timeout: 5000
|
||||
timeout: 5000,
|
||||
});
|
||||
|
||||
// Any response (even 4xx) means Caddy is serving the site
|
||||
@@ -1782,14 +1782,14 @@ app.use((err, req, res, next) => {
|
||||
success: false,
|
||||
error: err.message,
|
||||
code: err.code,
|
||||
...(err.details ? { details: err.details } : {})
|
||||
...(err.details ? { details: err.details } : {}),
|
||||
});
|
||||
}
|
||||
if (err instanceof ValidationError) {
|
||||
return res.status(err.statusCode || 400).json({
|
||||
success: false,
|
||||
error: err.message,
|
||||
errors: err.errors || undefined
|
||||
errors: err.errors || undefined,
|
||||
});
|
||||
}
|
||||
// Catch-all: never leak stack traces or internal paths
|
||||
@@ -1870,7 +1870,7 @@ const server = app.listen(PORT, '0.0.0.0', () => {
|
||||
ctx.notification.send('system.update',
|
||||
result.success ? 'DashCaddy Updated' : 'DashCaddy Update Failed',
|
||||
result.success ? `Updated to v${result.version}` : `Update failed: ${result.error || 'Unknown'}. Rolled back.`,
|
||||
result.success ? 'info' : 'error'
|
||||
result.success ? 'info' : 'error',
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1889,7 +1889,7 @@ const server = app.listen(PORT, '0.0.0.0', () => {
|
||||
log.info('maintenance', 'Docker maintenance completed', {
|
||||
spaceReclaimedMB: saved,
|
||||
pruned: result.pruned,
|
||||
warnings: result.warnings.length
|
||||
warnings: result.warnings.length,
|
||||
});
|
||||
}
|
||||
if (result.warnings.length > 0) {
|
||||
|
||||
@@ -108,7 +108,7 @@ async function validateStartupConfig({ log, CADDYFILE_PATH, SERVICES_FILE, CONFI
|
||||
port: urlObj.port,
|
||||
path: '/config/',
|
||||
method: 'GET',
|
||||
timeout: 2000
|
||||
timeout: 2000,
|
||||
}, (res) => {
|
||||
resolve(res.statusCode >= 200 && res.statusCode < 500);
|
||||
});
|
||||
|
||||
@@ -27,9 +27,9 @@ class StateManager {
|
||||
retries: {
|
||||
retries: options.lockRetries || 10,
|
||||
minTimeout: options.lockRetryInterval || 100,
|
||||
maxTimeout: (options.lockRetryInterval || 100) * 3
|
||||
maxTimeout: (options.lockRetryInterval || 100) * 3,
|
||||
},
|
||||
stale: options.lockTimeout || 30000 // 30 seconds
|
||||
stale: options.lockTimeout || 30000, // 30 seconds
|
||||
};
|
||||
|
||||
// Ensure file exists
|
||||
|
||||
@@ -26,7 +26,7 @@ const colors = {
|
||||
red: '\x1b[31m',
|
||||
yellow: '\x1b[33m',
|
||||
blue: '\x1b[34m',
|
||||
cyan: '\x1b[36m'
|
||||
cyan: '\x1b[36m',
|
||||
};
|
||||
|
||||
function log(message, color = 'reset') {
|
||||
@@ -56,7 +56,7 @@ async function makeRequest(path, options = {}) {
|
||||
path: url.pathname + url.search,
|
||||
method: options.method || 'GET',
|
||||
headers: options.headers || {},
|
||||
...options
|
||||
...options,
|
||||
};
|
||||
|
||||
const req = client.request(requestOptions, (res) => {
|
||||
@@ -67,7 +67,7 @@ async function makeRequest(path, options = {}) {
|
||||
statusCode: res.statusCode,
|
||||
headers: res.headers,
|
||||
body: data,
|
||||
data: data ? (data.startsWith('{') || data.startsWith('[') ? JSON.parse(data) : data) : null
|
||||
data: data ? (data.startsWith('{') || data.startsWith('[') ? JSON.parse(data) : data) : null,
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -90,7 +90,7 @@ async function testPathTraversal() {
|
||||
{ path: '/api/browse/directories?path=../../../../../../etc/passwd', desc: 'Unix path traversal' },
|
||||
{ path: '/api/browse/directories?path=..\\..\\..\\Windows\\System32', desc: 'Windows path traversal' },
|
||||
{ path: '/api/browse/directories?path=%2e%2e%2f%2e%2e%2fetc%2fpasswd', desc: 'URL-encoded traversal' },
|
||||
{ path: '/api/browse/directories?path=/allowed/media/../../../secrets', desc: 'Mixed path traversal' }
|
||||
{ path: '/api/browse/directories?path=/allowed/media/../../../secrets', desc: 'Mixed path traversal' },
|
||||
];
|
||||
|
||||
for (const attack of attacks) {
|
||||
@@ -117,7 +117,7 @@ async function testRequestSizeLimits() {
|
||||
const response = await makeRequest('/api/services', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(smallPayload)
|
||||
body: JSON.stringify(smallPayload),
|
||||
});
|
||||
logResult(true, 'Small payload accepted (100 bytes)');
|
||||
} catch (error) {
|
||||
@@ -130,7 +130,7 @@ async function testRequestSizeLimits() {
|
||||
const response = await makeRequest('/api/services', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(largePayload)
|
||||
body: JSON.stringify(largePayload),
|
||||
});
|
||||
if (response.statusCode === 413 || response.statusCode === 400) {
|
||||
logResult(true, 'Large payload rejected on general endpoint (2MB)');
|
||||
@@ -151,7 +151,7 @@ async function testRequestSizeLimits() {
|
||||
const response = await makeRequest('/api/logo', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ logo: largeImage })
|
||||
body: JSON.stringify({ logo: largeImage }),
|
||||
});
|
||||
if (response.statusCode !== 413) {
|
||||
logResult(true, 'Large payload accepted on logo endpoint (5MB)');
|
||||
|
||||
@@ -83,7 +83,7 @@ class UpdateManager extends EventEmitter {
|
||||
currentDigest: currentDigest.substring(0, 12),
|
||||
latestDigest: latestDigest.substring(0, 12),
|
||||
currentTag: this.extractTag(imageName),
|
||||
detectedAt: new Date().toISOString()
|
||||
detectedAt: new Date().toISOString(),
|
||||
});
|
||||
|
||||
this.emit('update-available', this.availableUpdates.get(containerInfo.Id));
|
||||
@@ -137,8 +137,8 @@ class UpdateManager extends EventEmitter {
|
||||
path: `/v2/${repo}/manifests/${tag}`,
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Accept': 'application/vnd.docker.distribution.manifest.v2+json'
|
||||
}
|
||||
'Accept': 'application/vnd.docker.distribution.manifest.v2+json',
|
||||
},
|
||||
};
|
||||
|
||||
const req = https.request(options, (res) => {
|
||||
@@ -206,8 +206,8 @@ class UpdateManager extends EventEmitter {
|
||||
...originalOptions,
|
||||
headers: {
|
||||
...originalOptions.headers,
|
||||
'Authorization': `Bearer ${token}`
|
||||
}
|
||||
'Authorization': `Bearer ${token}`,
|
||||
},
|
||||
};
|
||||
|
||||
const req = https.request(options, (res) => {
|
||||
@@ -271,7 +271,7 @@ class UpdateManager extends EventEmitter {
|
||||
config: inspect.Config,
|
||||
hostConfig: inspect.HostConfig,
|
||||
networkSettings: inspect.NetworkSettings,
|
||||
timestamp: new Date().toISOString()
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
|
||||
// Pull latest image
|
||||
@@ -292,7 +292,7 @@ class UpdateManager extends EventEmitter {
|
||||
name: containerName,
|
||||
Image: imageName,
|
||||
...backup.config,
|
||||
HostConfig: backup.hostConfig
|
||||
HostConfig: backup.hostConfig,
|
||||
});
|
||||
|
||||
// Start new container
|
||||
@@ -300,7 +300,7 @@ class UpdateManager extends EventEmitter {
|
||||
await newContainer.start();
|
||||
|
||||
// Extended verification with health checks and port accessibility
|
||||
console.log(`[UpdateManager] Performing extended verification...`);
|
||||
console.log('[UpdateManager] Performing extended verification...');
|
||||
await this.verifyContainerExtended(newContainer, inspect, options.verifyTimeout || 60000);
|
||||
|
||||
// Get new image ID
|
||||
@@ -313,7 +313,7 @@ class UpdateManager extends EventEmitter {
|
||||
console.log(`[UpdateManager] Removing old image: ${oldImageId.substring(0, 12)}`);
|
||||
const oldImage = docker.getImage(oldImageId);
|
||||
await oldImage.remove({ force: false });
|
||||
console.log(`[UpdateManager] Old image removed successfully`);
|
||||
console.log('[UpdateManager] Old image removed successfully');
|
||||
} catch (error) {
|
||||
console.warn(`[UpdateManager] Could not remove old image (may be in use): ${error.message}`);
|
||||
}
|
||||
@@ -330,7 +330,7 @@ class UpdateManager extends EventEmitter {
|
||||
timestamp: new Date().toISOString(),
|
||||
duration,
|
||||
status: 'success',
|
||||
backup
|
||||
backup,
|
||||
};
|
||||
|
||||
this.addToHistory(historyEntry);
|
||||
@@ -348,7 +348,7 @@ class UpdateManager extends EventEmitter {
|
||||
timestamp: new Date().toISOString(),
|
||||
duration,
|
||||
status: 'failed',
|
||||
error: error.message
|
||||
error: error.message,
|
||||
};
|
||||
|
||||
this.addToHistory(historyEntry);
|
||||
@@ -360,7 +360,7 @@ class UpdateManager extends EventEmitter {
|
||||
try {
|
||||
await this.rollbackUpdate(containerId);
|
||||
} catch (rollbackError) {
|
||||
console.error(`[UpdateManager] Rollback failed:`, rollbackError.message);
|
||||
console.error('[UpdateManager] Rollback failed:', rollbackError.message);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -448,7 +448,7 @@ class UpdateManager extends EventEmitter {
|
||||
// Step 2: Check Docker health check if available
|
||||
if (inspect.State.Health) {
|
||||
if (inspect.State.Health.Status === 'healthy') {
|
||||
console.log(`[UpdateManager] Container health check: healthy`);
|
||||
console.log('[UpdateManager] Container health check: healthy');
|
||||
return true;
|
||||
} else if (inspect.State.Health.Status === 'unhealthy') {
|
||||
lastError = 'Container health check failed (unhealthy)';
|
||||
@@ -468,7 +468,7 @@ class UpdateManager extends EventEmitter {
|
||||
try {
|
||||
const response = await fetch(testUrl, {
|
||||
signal: AbortSignal.timeout(3000),
|
||||
redirect: 'manual'
|
||||
redirect: 'manual',
|
||||
});
|
||||
|
||||
// Accept 2xx, 3xx, 4xx as "accessible" (server is responding)
|
||||
@@ -477,7 +477,7 @@ class UpdateManager extends EventEmitter {
|
||||
|
||||
// Wait a bit more to ensure stability
|
||||
if (attempt >= 2) {
|
||||
console.log(`[UpdateManager] Container verified successfully`);
|
||||
console.log('[UpdateManager] Container verified successfully');
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -488,7 +488,7 @@ class UpdateManager extends EventEmitter {
|
||||
} else {
|
||||
// No ports exposed - just verify it's running for a few cycles
|
||||
if (attempt >= 5) {
|
||||
console.log(`[UpdateManager] Container running without exposed ports (verified)`);
|
||||
console.log('[UpdateManager] Container running without exposed ports (verified)');
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -529,7 +529,7 @@ class UpdateManager extends EventEmitter {
|
||||
ports.push({
|
||||
containerPort: containerPort.split('/')[0],
|
||||
hostPort: binding.HostPort,
|
||||
protocol: containerPort.split('/')[1] || 'tcp'
|
||||
protocol: containerPort.split('/')[1] || 'tcp',
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -572,7 +572,7 @@ class UpdateManager extends EventEmitter {
|
||||
name: backup.containerName,
|
||||
Image: backup.imageName,
|
||||
...backup.config,
|
||||
HostConfig: backup.hostConfig
|
||||
HostConfig: backup.hostConfig,
|
||||
});
|
||||
|
||||
await newContainer.start();
|
||||
@@ -582,7 +582,7 @@ class UpdateManager extends EventEmitter {
|
||||
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error(`[UpdateManager] Rollback failed:`, error.message);
|
||||
console.error('[UpdateManager] Rollback failed:', error.message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
@@ -599,7 +599,7 @@ class UpdateManager extends EventEmitter {
|
||||
|
||||
setTimeout(() => {
|
||||
this.updateContainer(containerId).catch(error => {
|
||||
console.error(`[UpdateManager] Scheduled update failed:`, error.message);
|
||||
console.error('[UpdateManager] Scheduled update failed:', error.message);
|
||||
});
|
||||
}, delay);
|
||||
|
||||
@@ -663,20 +663,20 @@ class UpdateManager extends EventEmitter {
|
||||
shortDescription: repoInfo?.description?.substring(0, 200) || '',
|
||||
starCount: repoInfo?.star_count || 0,
|
||||
pullCount: repoInfo?.pull_count || 0,
|
||||
lastUpdated: repoInfo?.last_updated || null
|
||||
lastUpdated: repoInfo?.last_updated || null,
|
||||
},
|
||||
tags: tags.slice(0, 10).map(t => ({
|
||||
name: t.name,
|
||||
lastPushed: t.last_pushed || t.tag_last_pushed,
|
||||
digest: t.digest?.substring(0, 12) || 'unknown',
|
||||
size: t.full_size || t.size || 0
|
||||
size: t.full_size || t.size || 0,
|
||||
})),
|
||||
urls: {
|
||||
dockerHub: hubUrl,
|
||||
tags: `${hubUrl}/tags`,
|
||||
dockerfile: repoInfo?.dockerfile_url || null
|
||||
dockerfile: repoInfo?.dockerfile_url || null,
|
||||
},
|
||||
changelog: this.formatChangelog(repoInfo, tags, imageTag)
|
||||
changelog: this.formatChangelog(repoInfo, tags, imageTag),
|
||||
};
|
||||
} catch (error) {
|
||||
console.error(`[UpdateManager] Error fetching changelog for ${imageName}:`, error.message);
|
||||
@@ -691,7 +691,7 @@ class UpdateManager extends EventEmitter {
|
||||
urls: {
|
||||
dockerHub: `https://hub.docker.com/r/${repoPath.replace('library/', '_/')}`,
|
||||
},
|
||||
changelog: 'Unable to fetch changelog. Visit Docker Hub for details.'
|
||||
changelog: 'Unable to fetch changelog. Visit Docker Hub for details.',
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -711,8 +711,8 @@ class UpdateManager extends EventEmitter {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'User-Agent': 'DashCaddy/1.0'
|
||||
}
|
||||
'User-Agent': 'DashCaddy/1.0',
|
||||
},
|
||||
};
|
||||
|
||||
const req = https.request(options, (res) => {
|
||||
@@ -755,8 +755,8 @@ class UpdateManager extends EventEmitter {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'User-Agent': 'DashCaddy/1.0'
|
||||
}
|
||||
'User-Agent': 'DashCaddy/1.0',
|
||||
},
|
||||
};
|
||||
|
||||
const req = https.request(options, (res) => {
|
||||
@@ -836,7 +836,7 @@ class UpdateManager extends EventEmitter {
|
||||
schedule: config.schedule || 'weekly',
|
||||
maintenanceWindow: config.maintenanceWindow,
|
||||
autoRollback: config.autoRollback !== false,
|
||||
securityOnly: config.securityOnly || false
|
||||
securityOnly: config.securityOnly || false,
|
||||
};
|
||||
|
||||
this.saveConfig();
|
||||