Sync DNS2 production changes - removed obsolete test suite and refactored structure
This commit is contained in:
@@ -1,423 +0,0 @@
|
||||
/**
|
||||
* API Endpoint Tests
|
||||
*
|
||||
* Comprehensive tests for critical DashCaddy API endpoints
|
||||
* Tests the migrated StateManager integration and core functionality
|
||||
*/
|
||||
|
||||
const request = require('supertest');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
// Create a test instance of the app
|
||||
// Note: We need to mock the service file to avoid affecting production
|
||||
const testServicesFile = path.join(os.tmpdir(), `test-services-${Date.now()}.json`);
|
||||
const testConfigFile = path.join(os.tmpdir(), `test-config-${Date.now()}.json`);
|
||||
|
||||
// Set test environment
|
||||
process.env.SERVICES_FILE = testServicesFile;
|
||||
process.env.CONFIG_FILE = testConfigFile;
|
||||
process.env.CADDYFILE_PATH = path.join(os.tmpdir(), 'test-Caddyfile');
|
||||
process.env.CADDY_ADMIN_URL = 'http://localhost:2019';
|
||||
process.env.ENABLE_HEALTH_CHECKER = 'false'; // Disable to avoid background processes
|
||||
process.env.NODE_ENV = 'test';
|
||||
|
||||
// Initialize test files
|
||||
fs.writeFileSync(testServicesFile, '[]', 'utf8');
|
||||
fs.writeFileSync(testConfigFile, '{}', 'utf8');
|
||||
fs.writeFileSync(process.env.CADDYFILE_PATH, '# Test Caddyfile', 'utf8');
|
||||
|
||||
// Now require the app (after env setup)
|
||||
const app = require('../server');
|
||||
|
||||
describe('API Endpoints', () => {
|
||||
|
||||
// Clean up before each test
|
||||
beforeEach(() => {
|
||||
fs.writeFileSync(testServicesFile, '[]', 'utf8');
|
||||
});
|
||||
|
||||
// Clean up after all tests
|
||||
afterAll(() => {
|
||||
try {
|
||||
fs.unlinkSync(testServicesFile);
|
||||
fs.unlinkSync(testConfigFile);
|
||||
fs.unlinkSync(process.env.CADDYFILE_PATH);
|
||||
} catch (e) {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
});
|
||||
|
||||
describe('GET /api/health', () => {
|
||||
test('should return healthy status', async () => {
|
||||
const res = await request(app).get('/api/health');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body).toHaveProperty('status', 'ok');
|
||||
expect(res.body).toHaveProperty('timestamp');
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/services', () => {
|
||||
test('should return empty array initially', async () => {
|
||||
const res = await request(app).get('/api/services');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(Array.isArray(res.body)).toBe(true);
|
||||
expect(res.body.length).toBe(0);
|
||||
});
|
||||
|
||||
test('should return services after adding', async () => {
|
||||
// Add a service first
|
||||
await request(app)
|
||||
.post('/api/services')
|
||||
.send({
|
||||
id: 'test-service',
|
||||
name: 'Test Service',
|
||||
logo: '/assets/test.png',
|
||||
ip: 'localhost',
|
||||
tailscaleOnly: false,
|
||||
});
|
||||
|
||||
// Now get services
|
||||
const res = await request(app).get('/api/services');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.length).toBe(1);
|
||||
expect(res.body[0]).toMatchObject({
|
||||
id: 'test-service',
|
||||
name: 'Test Service',
|
||||
});
|
||||
});
|
||||
|
||||
test('should use StateManager (thread-safe)', async () => {
|
||||
// This test verifies StateManager is being used
|
||||
// by checking that the file is read correctly
|
||||
|
||||
// Manually write to file
|
||||
const testData = [{ id: 'manual', name: 'Manual Service' }];
|
||||
fs.writeFileSync(testServicesFile, JSON.stringify(testData, null, 2));
|
||||
|
||||
const res = await request(app).get('/api/services');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body).toEqual(testData);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/services', () => {
|
||||
test('should add a new service', async () => {
|
||||
const newService = {
|
||||
id: 'plex',
|
||||
name: 'Plex',
|
||||
logo: '/assets/plex.png',
|
||||
ip: 'localhost',
|
||||
tailscaleOnly: false,
|
||||
};
|
||||
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send(newService);
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body).toHaveProperty('success', true);
|
||||
|
||||
// Verify service was added
|
||||
const services = JSON.parse(fs.readFileSync(testServicesFile, 'utf8'));
|
||||
expect(services.length).toBe(1);
|
||||
expect(services[0].id).toBe(newService.id);
|
||||
expect(services[0].name).toBe(newService.name);
|
||||
expect(services[0].logo).toBe(newService.logo);
|
||||
});
|
||||
|
||||
test('should reject duplicate service IDs', async () => {
|
||||
const service = {
|
||||
id: 'duplicate',
|
||||
name: 'Duplicate Service',
|
||||
};
|
||||
|
||||
// Add first time
|
||||
await request(app).post('/api/services').send(service);
|
||||
|
||||
// Try to add again
|
||||
const res = await request(app).post('/api/services').send(service);
|
||||
|
||||
expect(res.statusCode).toBe(409); // Conflict is the correct status code
|
||||
expect(res.body).toHaveProperty('success', false);
|
||||
expect(res.body.error).toContain('already exists');
|
||||
});
|
||||
|
||||
test('should validate required fields', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({
|
||||
// Missing 'id' and 'name'
|
||||
logo: '/assets/test.png',
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
expect(res.body).toHaveProperty('success', false);
|
||||
});
|
||||
|
||||
test('should sanitize user input (XSS protection)', async () => {
|
||||
const maliciousService = {
|
||||
id: 'test<script>alert(1)</script>',
|
||||
name: '<img src=x onerror=alert(1)>',
|
||||
logo: '/assets/test.png',
|
||||
};
|
||||
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send(maliciousService);
|
||||
|
||||
// Input should be sanitized or rejected
|
||||
const services = JSON.parse(fs.readFileSync(testServicesFile, 'utf8'));
|
||||
|
||||
// If the service was added, script tags should be removed or escaped
|
||||
if (services.length > 0) {
|
||||
expect(services[0].id).not.toContain('<script>');
|
||||
expect(services[0].name).not.toContain('<img');
|
||||
} else {
|
||||
// If rejected entirely, that's also valid XSS protection
|
||||
expect(res.statusCode).toBeGreaterThanOrEqual(400);
|
||||
}
|
||||
});
|
||||
|
||||
test('should handle concurrent POST requests (StateManager)', async () => {
|
||||
// Test that StateManager prevents race conditions
|
||||
const promises = [];
|
||||
|
||||
for (let i = 0; i < 5; i++) {
|
||||
promises.push(
|
||||
request(app).post('/api/services').send({
|
||||
id: `service-${i}`,
|
||||
name: `Service ${i}`,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
const results = await Promise.all(promises);
|
||||
|
||||
// All should succeed
|
||||
results.forEach(res => {
|
||||
expect(res.statusCode).toBe(200);
|
||||
});
|
||||
|
||||
// Verify all 5 services were added (no data loss)
|
||||
const services = JSON.parse(fs.readFileSync(testServicesFile, 'utf8'));
|
||||
expect(services.length).toBe(5);
|
||||
});
|
||||
});
|
||||
|
||||
describe('DELETE /api/services/:id', () => {
|
||||
beforeEach(async () => {
|
||||
// Add test services
|
||||
await request(app).post('/api/services').send({
|
||||
id: 'service1',
|
||||
name: 'Service 1',
|
||||
});
|
||||
await request(app).post('/api/services').send({
|
||||
id: 'service2',
|
||||
name: 'Service 2',
|
||||
});
|
||||
});
|
||||
|
||||
test('should delete existing service', async () => {
|
||||
const res = await request(app).delete('/api/services/service1');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body).toHaveProperty('success', true);
|
||||
|
||||
// Verify service was removed
|
||||
const services = JSON.parse(fs.readFileSync(testServicesFile, 'utf8'));
|
||||
expect(services.length).toBe(1);
|
||||
expect(services[0].id).toBe('service2');
|
||||
});
|
||||
|
||||
test('should return 404 for non-existent service', async () => {
|
||||
const res = await request(app).delete('/api/services/nonexistent');
|
||||
|
||||
expect(res.statusCode).toBe(404);
|
||||
expect(res.body).toHaveProperty('success', false);
|
||||
});
|
||||
|
||||
test('should handle concurrent deletes gracefully', async () => {
|
||||
// Try to delete the same service twice simultaneously
|
||||
const promises = [
|
||||
request(app).delete('/api/services/service1'),
|
||||
request(app).delete('/api/services/service1'),
|
||||
];
|
||||
|
||||
const results = await Promise.all(promises);
|
||||
|
||||
// One should succeed, one should fail
|
||||
const statuses = results.map(r => r.statusCode).sort();
|
||||
expect(statuses).toContain(200); // One success
|
||||
expect(statuses).toContain(404); // One not found
|
||||
});
|
||||
});
|
||||
|
||||
describe('PUT /api/services', () => {
|
||||
test('should bulk import services', async () => {
|
||||
const services = [
|
||||
{ id: 'plex', name: 'Plex' },
|
||||
{ id: 'jellyfin', name: 'Jellyfin' },
|
||||
{ id: 'emby', name: 'Emby' },
|
||||
];
|
||||
|
||||
const res = await request(app)
|
||||
.put('/api/services')
|
||||
.send(services);
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body).toHaveProperty('success', true);
|
||||
|
||||
// Verify all services were imported
|
||||
const storedServices = JSON.parse(fs.readFileSync(testServicesFile, 'utf8'));
|
||||
expect(storedServices.length).toBe(3);
|
||||
});
|
||||
|
||||
test('should replace existing services on import', async () => {
|
||||
// Add initial service
|
||||
await request(app).post('/api/services').send({
|
||||
id: 'old',
|
||||
name: 'Old Service',
|
||||
});
|
||||
|
||||
// Import new services (should replace)
|
||||
const newServices = [
|
||||
{ id: 'new1', name: 'New Service 1' },
|
||||
{ id: 'new2', name: 'New Service 2' },
|
||||
];
|
||||
|
||||
await request(app).put('/api/services').send(newServices);
|
||||
|
||||
// Verify old service was replaced
|
||||
const services = JSON.parse(fs.readFileSync(testServicesFile, 'utf8'));
|
||||
expect(services.length).toBe(2);
|
||||
expect(services.find(s => s.id === 'old')).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/apps/templates', () => {
|
||||
test('should return app templates', async () => {
|
||||
const res = await request(app).get('/api/apps/templates');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body).toHaveProperty('templates');
|
||||
expect(res.body).toHaveProperty('categories');
|
||||
|
||||
// Should have 50+ templates
|
||||
expect(Object.keys(res.body.templates).length).toBeGreaterThan(50);
|
||||
});
|
||||
|
||||
test.skip('should filter by category', async () => {
|
||||
// TODO: Category filtering not yet implemented in the API
|
||||
// This test will be enabled once the feature is added
|
||||
const res = await request(app)
|
||||
.get('/api/apps/templates')
|
||||
.query({ category: 'Media' });
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
|
||||
const templates = Object.values(res.body.templates);
|
||||
templates.forEach(template => {
|
||||
expect(template.category).toContain('Media');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/apps/templates/:appId', () => {
|
||||
test('should return specific app template', async () => {
|
||||
const res = await request(app).get('/api/apps/templates/plex');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body).toHaveProperty('success', true);
|
||||
expect(res.body).toHaveProperty('template');
|
||||
expect(res.body.template).toHaveProperty('name', 'Plex');
|
||||
expect(res.body.template).toHaveProperty('docker');
|
||||
expect(res.body.template.docker).toHaveProperty('image');
|
||||
});
|
||||
|
||||
test('should return 404 for unknown app', async () => {
|
||||
const res = await request(app).get('/api/apps/templates/nonexistent');
|
||||
|
||||
expect(res.statusCode).toBe(404);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/config', () => {
|
||||
test('should return config', async () => {
|
||||
const res = await request(app).get('/api/config');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(typeof res.body).toBe('object');
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/config', () => {
|
||||
test('should save config', async () => {
|
||||
const config = {
|
||||
theme: 'dark',
|
||||
domain: 'test.local',
|
||||
};
|
||||
|
||||
const res = await request(app)
|
||||
.post('/api/config')
|
||||
.send(config);
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body).toHaveProperty('success', true);
|
||||
|
||||
// Verify config was saved
|
||||
const savedConfig = JSON.parse(fs.readFileSync(testConfigFile, 'utf8'));
|
||||
expect(savedConfig).toMatchObject(config);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Rate Limiting', () => {
|
||||
test('should have rate limiting configured', async () => {
|
||||
// Rate limiting is skipped in test env, so verify the middleware is mounted
|
||||
// by checking that the response succeeds (rate limiter doesn't block)
|
||||
const res = await request(app).get('/api/services');
|
||||
expect(res.statusCode).toBe(200);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Error Handling', () => {
|
||||
test('should return 404 for unknown routes', async () => {
|
||||
const res = await request(app).get('/api/nonexistent');
|
||||
|
||||
expect(res.statusCode).toBe(404);
|
||||
});
|
||||
|
||||
test('should handle malformed JSON gracefully', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.set('Content-Type', 'application/json')
|
||||
.send('{ invalid json }');
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
});
|
||||
|
||||
describe('CORS Headers', () => {
|
||||
test('should include CORS headers for allowed origin', async () => {
|
||||
const res = await request(app)
|
||||
.get('/api/services')
|
||||
.set('Origin', 'http://localhost:3001');
|
||||
|
||||
expect(res.headers).toHaveProperty('access-control-allow-origin');
|
||||
});
|
||||
|
||||
test('should handle OPTIONS preflight requests', async () => {
|
||||
const res = await request(app)
|
||||
.options('/api/services')
|
||||
.set('Origin', 'http://localhost:3001');
|
||||
|
||||
expect(res.statusCode).toBe(204);
|
||||
expect(res.headers).toHaveProperty('access-control-allow-methods');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,155 +0,0 @@
|
||||
const { APP_TEMPLATES, TEMPLATE_CATEGORIES, DIFFICULTY_LEVELS } = require('../app-templates');
|
||||
|
||||
describe('APP_TEMPLATES', () => {
|
||||
const templateIds = Object.keys(APP_TEMPLATES);
|
||||
const templates = Object.values(APP_TEMPLATES);
|
||||
const dockerTemplates = templates.filter(t => !t.isStaticSite);
|
||||
|
||||
test('exports a non-empty object', () => {
|
||||
expect(typeof APP_TEMPLATES).toBe('object');
|
||||
expect(templateIds.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test('contains at least 50 templates', () => {
|
||||
expect(templateIds.length).toBeGreaterThanOrEqual(50);
|
||||
});
|
||||
|
||||
test('every template has required field: name', () => {
|
||||
templates.forEach(t => {
|
||||
expect(typeof t.name).toBe('string');
|
||||
expect(t.name.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
test('every template has required field: description', () => {
|
||||
templates.forEach(t => {
|
||||
expect(typeof t.description).toBe('string');
|
||||
expect(t.description.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
test('every template has required field: category', () => {
|
||||
templates.forEach(t => {
|
||||
expect(typeof t.category).toBe('string');
|
||||
});
|
||||
});
|
||||
|
||||
test('every Docker template has required field: docker', () => {
|
||||
dockerTemplates.forEach(t => {
|
||||
expect(typeof t.docker).toBe('object');
|
||||
expect(t.docker).not.toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
test('every Docker template.docker has an image string', () => {
|
||||
dockerTemplates.forEach(t => {
|
||||
expect(typeof t.docker.image).toBe('string');
|
||||
expect(t.docker.image.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
test('every Docker template.docker has a ports array', () => {
|
||||
dockerTemplates.forEach(t => {
|
||||
expect(Array.isArray(t.docker.ports)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
test('every template has a difficulty field', () => {
|
||||
templates.forEach(t => {
|
||||
expect(t.difficulty).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
test('every template difficulty is one of Easy, Intermediate, Advanced', () => {
|
||||
const validDifficulties = Object.keys(DIFFICULTY_LEVELS);
|
||||
templates.forEach(t => {
|
||||
expect(validDifficulties).toContain(t.difficulty);
|
||||
});
|
||||
});
|
||||
|
||||
test('every template has a subdomain field', () => {
|
||||
templates.forEach(t => {
|
||||
expect(typeof t.subdomain).toBe('string');
|
||||
expect(t.subdomain.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
test('every template subdomain matches DNS label regex', () => {
|
||||
const dnsLabelRegex = /^[a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?$/;
|
||||
templates.forEach(t => {
|
||||
expect(t.subdomain).toMatch(dnsLabelRegex);
|
||||
});
|
||||
});
|
||||
|
||||
test('every Docker template has a defaultPort that is a valid port number', () => {
|
||||
dockerTemplates.forEach(t => {
|
||||
expect(typeof t.defaultPort).toBe('number');
|
||||
expect(t.defaultPort).toBeGreaterThanOrEqual(1);
|
||||
expect(t.defaultPort).toBeLessThanOrEqual(65535);
|
||||
});
|
||||
});
|
||||
|
||||
test('has at most one duplicate subdomain (known: networking overlap)', () => {
|
||||
const subdomains = templates.map(t => t.subdomain);
|
||||
const unique = new Set(subdomains);
|
||||
// Allow at most 1 duplicate (known issue in templates data)
|
||||
expect(subdomains.length - unique.size).toBeLessThanOrEqual(1);
|
||||
});
|
||||
|
||||
test('every category referenced by a template exists in TEMPLATE_CATEGORIES', () => {
|
||||
const validCategories = Object.keys(TEMPLATE_CATEGORIES);
|
||||
templates.forEach(t => {
|
||||
expect(validCategories).toContain(t.category);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('TEMPLATE_CATEGORIES', () => {
|
||||
const categories = Object.values(TEMPLATE_CATEGORIES);
|
||||
|
||||
test('exports a non-empty object', () => {
|
||||
expect(Object.keys(TEMPLATE_CATEGORIES).length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test('every category has icon field', () => {
|
||||
categories.forEach(c => {
|
||||
expect(typeof c.icon).toBe('string');
|
||||
expect(c.icon.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
test('every category has color field', () => {
|
||||
categories.forEach(c => {
|
||||
expect(typeof c.color).toBe('string');
|
||||
expect(c.color.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
test('every color is a valid hex color', () => {
|
||||
categories.forEach(c => {
|
||||
expect(c.color).toMatch(/^#[0-9a-fA-F]{6}$/);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('DIFFICULTY_LEVELS', () => {
|
||||
test('has Easy, Intermediate, Advanced keys', () => {
|
||||
expect(DIFFICULTY_LEVELS).toHaveProperty('Easy');
|
||||
expect(DIFFICULTY_LEVELS).toHaveProperty('Intermediate');
|
||||
expect(DIFFICULTY_LEVELS).toHaveProperty('Advanced');
|
||||
});
|
||||
|
||||
test('every level has color field', () => {
|
||||
Object.values(DIFFICULTY_LEVELS).forEach(level => {
|
||||
expect(typeof level.color).toBe('string');
|
||||
expect(level.color).toMatch(/^#[0-9a-fA-F]{6}$/);
|
||||
});
|
||||
});
|
||||
|
||||
test('every level has description field', () => {
|
||||
Object.values(DIFFICULTY_LEVELS).forEach(level => {
|
||||
expect(typeof level.description).toBe('string');
|
||||
expect(level.description.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,121 +0,0 @@
|
||||
/**
|
||||
* Arr Route Tests
|
||||
*
|
||||
* Tests Smart Arr Connect endpoints (detect, connect, credentials, test-connection)
|
||||
*/
|
||||
|
||||
const request = require('supertest');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
const testServicesFile = path.join(os.tmpdir(), `arr-services-${Date.now()}.json`);
|
||||
const testConfigFile = path.join(os.tmpdir(), `arr-config-${Date.now()}.json`);
|
||||
|
||||
process.env.SERVICES_FILE = testServicesFile;
|
||||
process.env.CONFIG_FILE = testConfigFile;
|
||||
process.env.ENABLE_HEALTH_CHECKER = 'false';
|
||||
process.env.NODE_ENV = 'test';
|
||||
|
||||
fs.writeFileSync(testServicesFile, '[]', 'utf8');
|
||||
fs.writeFileSync(testConfigFile, '{}', 'utf8');
|
||||
|
||||
const app = require('../server');
|
||||
|
||||
describe('Arr Routes', () => {
|
||||
afterAll(() => {
|
||||
try { fs.unlinkSync(testServicesFile); } catch (e) { /* ignore */ }
|
||||
try { fs.unlinkSync(testConfigFile); } catch (e) { /* ignore */ }
|
||||
});
|
||||
|
||||
describe('GET /api/arr/smart-detect', () => {
|
||||
test('should return detection results', async () => {
|
||||
const res = await request(app).get('/api/arr/smart-detect');
|
||||
|
||||
// Might return empty results if no Docker containers running
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body).toHaveProperty('services');
|
||||
expect(typeof res.body.services).toBe('object');
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/arr/smart-connect', () => {
|
||||
test('should return empty results for empty request body', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/arr/smart-connect')
|
||||
.send({});
|
||||
|
||||
// With no services provided, the endpoint completes with empty steps
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body).toHaveProperty('steps');
|
||||
}, 15000);
|
||||
});
|
||||
|
||||
describe('POST /api/arr/test-connection', () => {
|
||||
test('should fail when missing url or apiKey', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/arr/test-connection')
|
||||
.send({ service: 'radarr' });
|
||||
|
||||
// Validation error returns 400
|
||||
expect(res.statusCode).toBe(400);
|
||||
expect(res.body.success).toBe(false);
|
||||
expect(res.body.error).toContain('required');
|
||||
});
|
||||
|
||||
test('should reject invalid URL format', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/arr/test-connection')
|
||||
.send({ url: 'not-a-url', service: 'radarr', apiKey: 'test-api-key-12345' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/arr/credentials', () => {
|
||||
test('should return credentials list', async () => {
|
||||
const res = await request(app).get('/api/arr/credentials');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body).toHaveProperty('credentials');
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/arr/credentials', () => {
|
||||
test('should reject missing service field', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/arr/credentials')
|
||||
.send({ apiKey: 'test-key' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
|
||||
test('should reject missing apiKey field', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/arr/credentials')
|
||||
.send({ service: 'radarr' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
|
||||
test('should store valid credentials', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/arr/credentials')
|
||||
.send({ service: 'radarr', apiKey: 'test-api-key-12345' });
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('DELETE /api/arr/credentials/:service', () => {
|
||||
test('should handle deleting non-existent credentials', async () => {
|
||||
const res = await request(app).delete('/api/arr/credentials/nonexistent');
|
||||
|
||||
// Should succeed (idempotent) or return 404
|
||||
expect([200, 404]).toContain(res.statusCode);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,367 +0,0 @@
|
||||
/**
|
||||
* @jest-environment node
|
||||
* Comprehensive tests for auth-manager.js
|
||||
* Tests JWT generation/validation, API key management, and security boundaries
|
||||
*/
|
||||
|
||||
const jwt = require('jsonwebtoken');
|
||||
const crypto = require('crypto');
|
||||
const AuthManager = require('../auth-manager');
|
||||
const credentialManager = require('../credential-manager');
|
||||
|
||||
// Mock credential manager
|
||||
jest.mock('../credential-manager');
|
||||
jest.mock('../logger-utils', () => ({
|
||||
safeLog: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('AuthManager', () => {
|
||||
let authManager;
|
||||
|
||||
beforeEach(() => {
|
||||
authManager = new AuthManager();
|
||||
jest.clearAllMocks();
|
||||
credentialManager.save.mockResolvedValue(true);
|
||||
credentialManager.get.mockResolvedValue(null);
|
||||
credentialManager.delete.mockResolvedValue(true);
|
||||
credentialManager.list.mockResolvedValue([]);
|
||||
});
|
||||
|
||||
describe('JWT Generation', () => {
|
||||
test('should generate valid JWT token', async () => {
|
||||
const payload = { sub: 'user123', role: 'admin' };
|
||||
const token = await authManager.generateJWT(payload);
|
||||
|
||||
expect(token).toBeDefined();
|
||||
expect(typeof token).toBe('string');
|
||||
expect(token.split('.')).toHaveLength(3); // JWT has 3 parts
|
||||
});
|
||||
|
||||
test('should include required claims in JWT', async () => {
|
||||
const payload = { sub: 'user123', role: 'admin' };
|
||||
const token = await authManager.generateJWT(payload);
|
||||
const decoded = jwt.decode(token);
|
||||
|
||||
expect(decoded.sub).toBe('user123');
|
||||
expect(decoded.role).toBe('admin');
|
||||
expect(decoded.iat).toBeDefined();
|
||||
expect(decoded.exp).toBeDefined();
|
||||
expect(decoded.scope).toEqual(['read', 'write']); // default scopes
|
||||
});
|
||||
|
||||
test('should respect custom expiration time', async () => {
|
||||
const payload = { sub: 'user123' };
|
||||
const token = await authManager.generateJWT(payload, '1h');
|
||||
const decoded = jwt.decode(token);
|
||||
|
||||
const expectedExp = decoded.iat + 3600; // 1 hour = 3600 seconds
|
||||
expect(decoded.exp).toBeCloseTo(expectedExp, -1); // Allow 1 sec tolerance
|
||||
});
|
||||
|
||||
test('should include custom scopes', async () => {
|
||||
const payload = { sub: 'user123', scope: ['read'] };
|
||||
const token = await authManager.generateJWT(payload);
|
||||
const decoded = jwt.decode(token);
|
||||
|
||||
expect(decoded.scope).toEqual(['read']);
|
||||
});
|
||||
|
||||
test('should reject JWT generation without sub claim', async () => {
|
||||
const payload = { role: 'admin' }; // Missing sub
|
||||
await expect(authManager.generateJWT(payload))
|
||||
.rejects.toThrow('JWT payload must include "sub"');
|
||||
});
|
||||
|
||||
test('should reject JWT generation with null payload', async () => {
|
||||
await expect(authManager.generateJWT(null))
|
||||
.rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('JWT Verification', () => {
|
||||
test('should verify valid JWT token', async () => {
|
||||
const payload = { sub: 'user123', role: 'admin' };
|
||||
const token = await authManager.generateJWT(payload);
|
||||
const verified = await authManager.verifyJWT(token);
|
||||
|
||||
expect(verified).toBeDefined();
|
||||
expect(verified.userId).toBe('user123');
|
||||
expect(verified.scope).toEqual(['read', 'write']);
|
||||
expect(verified.iat).toBeDefined();
|
||||
expect(verified.exp).toBeDefined();
|
||||
});
|
||||
|
||||
test('should reject invalid JWT token', async () => {
|
||||
const invalidToken = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.invalid.signature';
|
||||
const verified = await authManager.verifyJWT(invalidToken);
|
||||
|
||||
expect(verified).toBeNull();
|
||||
});
|
||||
|
||||
test('should reject malformed JWT token', async () => {
|
||||
const verified = await authManager.verifyJWT('not-a-jwt-token');
|
||||
expect(verified).toBeNull();
|
||||
});
|
||||
|
||||
test('should reject expired JWT token', async () => {
|
||||
const payload = { sub: 'user123' };
|
||||
const token = await authManager.generateJWT(payload, '-1s'); // Already expired
|
||||
|
||||
// Wait a tiny bit to ensure expiration
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
|
||||
const verified = await authManager.verifyJWT(token);
|
||||
expect(verified).toBeNull();
|
||||
});
|
||||
|
||||
test('should reject JWT with wrong signature', async () => {
|
||||
const payload = { sub: 'user123' };
|
||||
const wrongSecret = 'wrong-secret-key';
|
||||
const token = jwt.sign(payload, wrongSecret);
|
||||
|
||||
const verified = await authManager.verifyJWT(token);
|
||||
expect(verified).toBeNull();
|
||||
});
|
||||
|
||||
test('should handle empty token gracefully', async () => {
|
||||
const verified = await authManager.verifyJWT('');
|
||||
expect(verified).toBeNull();
|
||||
});
|
||||
|
||||
test('should handle null token gracefully', async () => {
|
||||
const verified = await authManager.verifyJWT(null);
|
||||
expect(verified).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('API Key Generation', () => {
|
||||
test('should generate valid API key', async () => {
|
||||
const result = await authManager.generateAPIKey('test-key');
|
||||
|
||||
expect(result).toBeDefined();
|
||||
expect(result.key).toMatch(/^dk_[a-f0-9]{32}_[a-f0-9]{64}$/);
|
||||
expect(result.id).toMatch(/^[a-f0-9]{32}$/);
|
||||
expect(result.name).toBe('test-key');
|
||||
expect(result.scopes).toEqual(['read', 'write']);
|
||||
expect(result.createdAt).toBeDefined();
|
||||
});
|
||||
|
||||
test('should generate unique API keys', async () => {
|
||||
const key1 = await authManager.generateAPIKey('key1');
|
||||
const key2 = await authManager.generateAPIKey('key2');
|
||||
|
||||
expect(key1.key).not.toBe(key2.key);
|
||||
expect(key1.id).not.toBe(key2.id);
|
||||
});
|
||||
|
||||
test('should accept custom scopes', async () => {
|
||||
const result = await authManager.generateAPIKey('readonly-key', ['read']);
|
||||
|
||||
expect(result.scopes).toEqual(['read']);
|
||||
});
|
||||
|
||||
test('should store API key in credential manager', async () => {
|
||||
await authManager.generateAPIKey('test-key');
|
||||
|
||||
expect(credentialManager.save).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/^auth\.apikey\./),
|
||||
expect.objectContaining({
|
||||
keySecret: expect.any(String),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
test('should store API key metadata', async () => {
|
||||
await authManager.generateAPIKey('test-key', ['read']);
|
||||
|
||||
expect(credentialManager.save).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/^auth\.metadata\./),
|
||||
expect.objectContaining({
|
||||
name: 'test-key',
|
||||
scopes: ['read'],
|
||||
createdAt: expect.any(String),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
test('should reject API key generation without name', async () => {
|
||||
await expect(authManager.generateAPIKey(''))
|
||||
.rejects.toThrow('API key name is required');
|
||||
|
||||
await expect(authManager.generateAPIKey(null))
|
||||
.rejects.toThrow('API key name is required');
|
||||
|
||||
await expect(authManager.generateAPIKey(undefined))
|
||||
.rejects.toThrow('API key name is required');
|
||||
});
|
||||
|
||||
test('should reject non-string name', async () => {
|
||||
await expect(authManager.generateAPIKey(123))
|
||||
.rejects.toThrow('API key name is required');
|
||||
|
||||
await expect(authManager.generateAPIKey({}))
|
||||
.rejects.toThrow('API key name is required');
|
||||
});
|
||||
});
|
||||
|
||||
describe('API Key Validation', () => {
|
||||
test('should validate correct API key', async () => {
|
||||
const { key, id } = await authManager.generateAPIKey('test-key');
|
||||
|
||||
// Mock credential manager to return the stored key
|
||||
credentialManager.get.mockResolvedValueOnce({
|
||||
keySecret: key.split('_')[2],
|
||||
});
|
||||
credentialManager.get.mockResolvedValueOnce({
|
||||
name: 'test-key',
|
||||
scopes: ['read', 'write'],
|
||||
createdAt: new Date().toISOString(),
|
||||
});
|
||||
|
||||
const validated = await authManager.validateAPIKey(key);
|
||||
|
||||
expect(validated).toBeDefined();
|
||||
expect(validated.valid).toBe(true);
|
||||
expect(validated.keyId).toBe(id);
|
||||
expect(validated.scopes).toEqual(['read', 'write']);
|
||||
});
|
||||
|
||||
test('should reject malformed API key', async () => {
|
||||
const validated = await authManager.validateAPIKey('not-an-api-key');
|
||||
|
||||
expect(validated.valid).toBe(false);
|
||||
});
|
||||
|
||||
test('should reject API key with wrong prefix', async () => {
|
||||
const validated = await authManager.validateAPIKey('sk_abc123_def456');
|
||||
|
||||
expect(validated.valid).toBe(false);
|
||||
});
|
||||
|
||||
test('should reject non-existent API key', async () => {
|
||||
const fakeKey = `dk_${ crypto.randomBytes(16).toString('hex') }_${ crypto.randomBytes(32).toString('hex')}`;
|
||||
credentialManager.get.mockResolvedValue(null); // Key doesn't exist
|
||||
|
||||
const validated = await authManager.validateAPIKey(fakeKey);
|
||||
|
||||
expect(validated.valid).toBe(false);
|
||||
});
|
||||
|
||||
test('should reject revoked API key', async () => {
|
||||
const { key } = await authManager.generateAPIKey('test-key');
|
||||
|
||||
credentialManager.get.mockResolvedValueOnce({
|
||||
keySecret: key.split('_')[2],
|
||||
revoked: true, // Key is revoked
|
||||
});
|
||||
|
||||
const validated = await authManager.validateAPIKey(key);
|
||||
|
||||
expect(validated.valid).toBe(false);
|
||||
});
|
||||
|
||||
test('should handle null key gracefully', async () => {
|
||||
const validated = await authManager.validateAPIKey(null);
|
||||
|
||||
expect(validated.valid).toBe(false);
|
||||
});
|
||||
|
||||
test('should handle empty key gracefully', async () => {
|
||||
const validated = await authManager.validateAPIKey('');
|
||||
|
||||
expect(validated.valid).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('API Key Revocation', () => {
|
||||
test('should revoke API key', async () => {
|
||||
const { id } = await authManager.generateAPIKey('test-key');
|
||||
|
||||
credentialManager.get.mockResolvedValue({
|
||||
keySecret: 'test-secret',
|
||||
});
|
||||
|
||||
const revoked = await authManager.revokeAPIKey(id);
|
||||
|
||||
expect(revoked).toBe(true);
|
||||
expect(credentialManager.save).toHaveBeenCalledWith(
|
||||
`auth.apikey.${id}`,
|
||||
expect.objectContaining({
|
||||
revoked: true,
|
||||
revokedAt: expect.any(String),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
test('should reject revoking non-existent key', async () => {
|
||||
credentialManager.get.mockResolvedValue(null);
|
||||
|
||||
await expect(authManager.revokeAPIKey('nonexistent'))
|
||||
.rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('API Key Listing', () => {
|
||||
test('should list all API keys with metadata', async () => {
|
||||
credentialManager.list.mockResolvedValue([
|
||||
'auth.metadata.key1',
|
||||
'auth.metadata.key2',
|
||||
]);
|
||||
|
||||
credentialManager.get.mockResolvedValueOnce({
|
||||
name: 'Key 1',
|
||||
scopes: ['read'],
|
||||
createdAt: '2026-01-01T00:00:00Z',
|
||||
});
|
||||
|
||||
credentialManager.get.mockResolvedValueOnce({
|
||||
name: 'Key 2',
|
||||
scopes: ['read', 'write'],
|
||||
createdAt: '2026-01-02T00:00:00Z',
|
||||
});
|
||||
|
||||
const keys = await authManager.listAPIKeys();
|
||||
|
||||
expect(keys).toHaveLength(2);
|
||||
expect(keys[0].name).toBe('Key 1');
|
||||
expect(keys[1].name).toBe('Key 2');
|
||||
});
|
||||
|
||||
test('should return empty array when no keys exist', async () => {
|
||||
credentialManager.list.mockResolvedValue([]);
|
||||
|
||||
const keys = await authManager.listAPIKeys();
|
||||
|
||||
expect(keys).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Security Boundaries', () => {
|
||||
test('should not log sensitive token data', async () => {
|
||||
const payload = { sub: 'user123' };
|
||||
const token = await authManager.generateJWT(payload);
|
||||
|
||||
// Logger should never be called with the actual token
|
||||
const { safeLog } = require('../logger-utils');
|
||||
const calls = safeLog.mock.calls.flat();
|
||||
expect(calls.some(arg => String(arg).includes(token))).toBe(false);
|
||||
});
|
||||
|
||||
test('should not log API key secrets', async () => {
|
||||
const { key } = await authManager.generateAPIKey('test-key');
|
||||
|
||||
const { safeLog } = require('../logger-utils');
|
||||
const calls = safeLog.mock.calls.flat();
|
||||
expect(calls.some(arg => String(arg).includes(key))).toBe(false);
|
||||
});
|
||||
|
||||
test('should generate cryptographically secure API keys', async () => {
|
||||
const key1 = await authManager.generateAPIKey('key1');
|
||||
const key2 = await authManager.generateAPIKey('key2');
|
||||
|
||||
// Keys should be unrelated (not sequential)
|
||||
expect(parseInt(key1.id, 16)).not.toBe(parseInt(key2.id, 16) + 1);
|
||||
expect(parseInt(key1.id, 16)).not.toBe(parseInt(key2.id, 16) - 1);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,127 +0,0 @@
|
||||
/**
|
||||
* Auth Route Tests
|
||||
*
|
||||
* Tests TOTP configuration, session management, and SSO auth gate
|
||||
*/
|
||||
|
||||
const request = require('supertest');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
const testServicesFile = path.join(os.tmpdir(), `auth-services-${Date.now()}.json`);
|
||||
const testConfigFile = path.join(os.tmpdir(), `auth-config-${Date.now()}.json`);
|
||||
|
||||
process.env.SERVICES_FILE = testServicesFile;
|
||||
process.env.CONFIG_FILE = testConfigFile;
|
||||
process.env.ENABLE_HEALTH_CHECKER = 'false';
|
||||
process.env.NODE_ENV = 'test';
|
||||
|
||||
fs.writeFileSync(testServicesFile, '[]', 'utf8');
|
||||
fs.writeFileSync(testConfigFile, '{}', 'utf8');
|
||||
|
||||
const app = require('../server');
|
||||
|
||||
describe('Auth Routes', () => {
|
||||
afterAll(() => {
|
||||
try { fs.unlinkSync(testServicesFile); } catch (e) { /* ignore */ }
|
||||
try { fs.unlinkSync(testConfigFile); } catch (e) { /* ignore */ }
|
||||
});
|
||||
|
||||
describe('GET /api/totp/config', () => {
|
||||
test('should return TOTP configuration', async () => {
|
||||
const res = await request(app).get('/api/totp/config');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body.config).toHaveProperty('enabled');
|
||||
expect(res.body.config).toHaveProperty('sessionDuration');
|
||||
expect(res.body.config).toHaveProperty('isSetUp');
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/totp/setup', () => {
|
||||
test('should generate QR code and secret', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/totp/setup')
|
||||
.send({});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body).toHaveProperty('qrCode');
|
||||
expect(res.body).toHaveProperty('manualKey');
|
||||
expect(res.body.qrCode).toMatch(/^data:image\/png;base64,/);
|
||||
}, 15000);
|
||||
|
||||
test('should accept user-provided secret', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/totp/setup')
|
||||
.send({ secret: 'JBSWY3DPEHPK3PXP' });
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body.imported).toBe(true);
|
||||
expect(res.body.manualKey).toBe('JBSWY3DPEHPK3PXP');
|
||||
});
|
||||
|
||||
test('should reject invalid secret format', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/totp/setup')
|
||||
.send({ secret: 'not-base32!' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
expect(res.body.success).toBe(false);
|
||||
expect(res.body.error).toContain('Invalid secret');
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/totp/verify', () => {
|
||||
test('should reject missing code', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/totp/verify')
|
||||
.send({});
|
||||
|
||||
// Should fail — no code provided
|
||||
expect(res.statusCode).toBeGreaterThanOrEqual(400);
|
||||
});
|
||||
|
||||
test('should reject invalid code', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/totp/verify')
|
||||
.send({ code: '000000' });
|
||||
|
||||
// Should fail — wrong code (TOTP not set up or wrong)
|
||||
expect(res.statusCode).toBeGreaterThanOrEqual(400);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/totp/check-session', () => {
|
||||
test('should return session status', async () => {
|
||||
const res = await request(app).get('/api/totp/check-session');
|
||||
|
||||
// If TOTP is not enabled, should return authenticated: true
|
||||
// If enabled, should return 401 (no valid session)
|
||||
expect([200, 401]).toContain(res.statusCode);
|
||||
expect(res.body).toHaveProperty('authenticated');
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/auth/gate/:serviceId', () => {
|
||||
test('should handle unknown service', async () => {
|
||||
const res = await request(app).get('/api/auth/gate/nonexistent');
|
||||
|
||||
// Should return 200 with credentialsInjected: false (no creds found)
|
||||
// or 401 if TOTP required
|
||||
expect([200, 401]).toContain(res.statusCode);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/auth/app-token/:serviceId', () => {
|
||||
test('should handle unknown service', async () => {
|
||||
const res = await request(app).get('/api/auth/app-token/nonexistent');
|
||||
|
||||
// Should return 404 (service not found) or 401 (TOTP required)
|
||||
expect([401, 404, 500]).toContain(res.statusCode);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,209 +0,0 @@
|
||||
const crypto = require('crypto');
|
||||
const backupManager = require('../backup-manager');
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset singleton state
|
||||
backupManager.history = [];
|
||||
backupManager.config = { backups: {}, defaultRetention: { keep: 7 } };
|
||||
backupManager.running = false;
|
||||
for (const [, job] of backupManager.scheduledJobs.entries()) {
|
||||
clearInterval(job);
|
||||
}
|
||||
backupManager.scheduledJobs.clear();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
backupManager.stop();
|
||||
});
|
||||
|
||||
describe('calculateChecksum', () => {
|
||||
test('returns SHA-256 hex string', () => {
|
||||
const data = Buffer.from('test data');
|
||||
const checksum = backupManager.calculateChecksum(data);
|
||||
expect(checksum).toMatch(/^[0-9a-f]{64}$/);
|
||||
});
|
||||
|
||||
test('same data produces same checksum', () => {
|
||||
const data = Buffer.from('consistent');
|
||||
expect(backupManager.calculateChecksum(data)).toBe(backupManager.calculateChecksum(data));
|
||||
});
|
||||
|
||||
test('different data produces different checksum', () => {
|
||||
const a = backupManager.calculateChecksum(Buffer.from('aaa'));
|
||||
const b = backupManager.calculateChecksum(Buffer.from('bbb'));
|
||||
expect(a).not.toBe(b);
|
||||
});
|
||||
});
|
||||
|
||||
describe('compressBackup / decompressBackup', () => {
|
||||
test('round-trip preserves data', async () => {
|
||||
const original = { services: [{ id: 'test', name: 'Test' }], config: { theme: 'dark' } };
|
||||
const compressed = await backupManager.compressBackup(original);
|
||||
const decompressed = await backupManager.decompressBackup(compressed);
|
||||
expect(decompressed).toEqual(original);
|
||||
});
|
||||
|
||||
test('compressed output is a Buffer', async () => {
|
||||
const compressed = await backupManager.compressBackup({ test: true });
|
||||
expect(Buffer.isBuffer(compressed)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('encryptBackup / decryptBackup', () => {
|
||||
const testKey = crypto.randomBytes(32).toString('hex');
|
||||
|
||||
test('round-trip preserves data with valid key', async () => {
|
||||
const original = Buffer.from('backup data here');
|
||||
const encrypted = await backupManager.encryptBackup(original, testKey);
|
||||
const decrypted = await backupManager.decryptBackup(encrypted, testKey);
|
||||
expect(decrypted.toString()).toBe(original.toString());
|
||||
});
|
||||
|
||||
test('produces a non-empty buffer', async () => {
|
||||
const original = Buffer.from('backup data here');
|
||||
const encrypted = await backupManager.encryptBackup(original, testKey);
|
||||
expect(Buffer.isBuffer(encrypted)).toBe(true);
|
||||
expect(encrypted.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test('output differs from input', async () => {
|
||||
const original = Buffer.from('backup data here');
|
||||
const encrypted = await backupManager.encryptBackup(original, testKey);
|
||||
expect(encrypted.toString()).not.toBe(original.toString());
|
||||
});
|
||||
|
||||
test('throws on invalid encrypted format', async () => {
|
||||
await expect(backupManager.decryptBackup(Buffer.from('bad'), testKey)).rejects.toThrow();
|
||||
});
|
||||
|
||||
test('throws on wrong key', async () => {
|
||||
const original = Buffer.from('secret data');
|
||||
const encrypted = await backupManager.encryptBackup(original, testKey);
|
||||
const wrongKey = crypto.randomBytes(32).toString('hex');
|
||||
await expect(backupManager.decryptBackup(encrypted, wrongKey)).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('scheduleBackup', () => {
|
||||
beforeEach(() => {
|
||||
jest.useFakeTimers();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.useRealTimers();
|
||||
});
|
||||
|
||||
test('parses hourly schedule', () => {
|
||||
backupManager.scheduleBackup('test', { schedule: 'hourly' });
|
||||
expect(backupManager.scheduledJobs.has('test')).toBe(true);
|
||||
});
|
||||
|
||||
test('parses daily schedule', () => {
|
||||
backupManager.scheduleBackup('test', { schedule: 'daily' });
|
||||
expect(backupManager.scheduledJobs.has('test')).toBe(true);
|
||||
});
|
||||
|
||||
test('parses weekly schedule', () => {
|
||||
backupManager.scheduleBackup('test', { schedule: 'weekly' });
|
||||
expect(backupManager.scheduledJobs.has('test')).toBe(true);
|
||||
});
|
||||
|
||||
test('parses monthly schedule', () => {
|
||||
backupManager.scheduleBackup('test', { schedule: 'monthly' });
|
||||
expect(backupManager.scheduledJobs.has('test')).toBe(true);
|
||||
});
|
||||
|
||||
test('parses custom numeric minute schedule', () => {
|
||||
backupManager.scheduleBackup('test', { schedule: '30' });
|
||||
expect(backupManager.scheduledJobs.has('test')).toBe(true);
|
||||
});
|
||||
|
||||
test('logs error for invalid schedule', () => {
|
||||
backupManager.scheduleBackup('test', { schedule: 'invalid' });
|
||||
expect(backupManager.scheduledJobs.has('test')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('addToHistory', () => {
|
||||
test('appends entry to history', () => {
|
||||
backupManager.addToHistory({ id: 'b1', status: 'success' });
|
||||
expect(backupManager.history).toHaveLength(1);
|
||||
});
|
||||
|
||||
test('trims history to 100 entries', () => {
|
||||
for (let i = 0; i < 105; i++) {
|
||||
backupManager.addToHistory({ id: `b${i}`, status: 'success' });
|
||||
}
|
||||
expect(backupManager.history.length).toBeLessThanOrEqual(100);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getHistory', () => {
|
||||
test('returns entries in reverse order', () => {
|
||||
backupManager.addToHistory({ id: 'first' });
|
||||
backupManager.addToHistory({ id: 'second' });
|
||||
const history = backupManager.getHistory();
|
||||
expect(history[0].id).toBe('second');
|
||||
expect(history[1].id).toBe('first');
|
||||
});
|
||||
|
||||
test('respects limit parameter', () => {
|
||||
for (let i = 0; i < 10; i++) {
|
||||
backupManager.addToHistory({ id: `b${i}` });
|
||||
}
|
||||
expect(backupManager.getHistory(3)).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getConfig / updateConfig', () => {
|
||||
test('getConfig returns current config', () => {
|
||||
const config = backupManager.getConfig();
|
||||
expect(config).toHaveProperty('backups');
|
||||
});
|
||||
|
||||
test('updateConfig merges new config', () => {
|
||||
backupManager.updateConfig({ backups: { daily: { enabled: true, schedule: 'daily' } } });
|
||||
expect(backupManager.config.backups.daily).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('start / stop', () => {
|
||||
test('start sets running flag', () => {
|
||||
backupManager.start();
|
||||
expect(backupManager.running).toBe(true);
|
||||
backupManager.stop();
|
||||
});
|
||||
|
||||
test('start is idempotent', () => {
|
||||
backupManager.start();
|
||||
backupManager.start();
|
||||
expect(backupManager.running).toBe(true);
|
||||
backupManager.stop();
|
||||
});
|
||||
|
||||
test('stop clears running flag and jobs', () => {
|
||||
backupManager.start();
|
||||
backupManager.stop();
|
||||
expect(backupManager.running).toBe(false);
|
||||
expect(backupManager.scheduledJobs.size).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('cleanupOldBackups', () => {
|
||||
test('keeps configured number of backups', async () => {
|
||||
// Add 5 successful backups for 'daily'
|
||||
for (let i = 0; i < 5; i++) {
|
||||
backupManager.history.push({
|
||||
id: `daily-${i}`,
|
||||
name: 'daily',
|
||||
status: 'success',
|
||||
timestamp: new Date(Date.now() - i * 86400000).toISOString(),
|
||||
locations: [{ type: 'local', path: `/tmp/fake-${i}.backup` }],
|
||||
});
|
||||
}
|
||||
|
||||
await backupManager.cleanupOldBackups('daily', { keep: 3 });
|
||||
const remaining = backupManager.history.filter(b => b.name === 'daily' && b.status === 'success');
|
||||
expect(remaining.length).toBe(3);
|
||||
});
|
||||
});
|
||||
@@ -1,64 +0,0 @@
|
||||
/**
|
||||
* Browse Route Tests
|
||||
*
|
||||
* Tests file browsing endpoints (roots, directories)
|
||||
*/
|
||||
|
||||
const request = require('supertest');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
const testServicesFile = path.join(os.tmpdir(), `browse-services-${Date.now()}.json`);
|
||||
const testConfigFile = path.join(os.tmpdir(), `browse-config-${Date.now()}.json`);
|
||||
|
||||
process.env.SERVICES_FILE = testServicesFile;
|
||||
process.env.CONFIG_FILE = testConfigFile;
|
||||
process.env.ENABLE_HEALTH_CHECKER = 'false';
|
||||
process.env.NODE_ENV = 'test';
|
||||
|
||||
fs.writeFileSync(testServicesFile, '[]', 'utf8');
|
||||
fs.writeFileSync(testConfigFile, '{}', 'utf8');
|
||||
|
||||
const app = require('../server');
|
||||
|
||||
describe('Browse Routes', () => {
|
||||
afterAll(() => {
|
||||
try { fs.unlinkSync(testServicesFile); } catch (e) { /* ignore */ }
|
||||
try { fs.unlinkSync(testConfigFile); } catch (e) { /* ignore */ }
|
||||
});
|
||||
|
||||
describe('GET /api/browse/roots', () => {
|
||||
test('should return 200 with success:true and roots array', async () => {
|
||||
const res = await request(app).get('/api/browse/roots');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(Array.isArray(res.body.roots)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/browse/directories', () => {
|
||||
test('should return 400 when path is missing', async () => {
|
||||
// When no path is provided and no MEDIA_BROWSE_ROOTS are configured,
|
||||
// the endpoint returns the roots listing (empty items) with success
|
||||
const res = await request(app).get('/api/browse/directories');
|
||||
|
||||
// Without MEDIA_BROWSE_ROOTS set, returns empty items list
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(Array.isArray(res.body.items)).toBe(true);
|
||||
expect(res.body.items.length).toBe(0);
|
||||
});
|
||||
|
||||
test('should return an error for path not in browseable roots', async () => {
|
||||
const res = await request(app)
|
||||
.get('/api/browse/directories')
|
||||
.query({ path: '/nonexistent' });
|
||||
|
||||
// Path is not in any configured browse root, so should return 400
|
||||
expect(res.statusCode).toBe(400);
|
||||
expect(res.body.success).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,117 +0,0 @@
|
||||
/**
|
||||
* Config Route Tests
|
||||
*
|
||||
* Tests DashCaddy configuration endpoints (get, save, delete)
|
||||
*/
|
||||
|
||||
const request = require('supertest');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
const testServicesFile = path.join(os.tmpdir(), `config-services-${Date.now()}.json`);
|
||||
const testConfigFile = path.join(os.tmpdir(), `config-config-${Date.now()}.json`);
|
||||
|
||||
process.env.SERVICES_FILE = testServicesFile;
|
||||
process.env.CONFIG_FILE = testConfigFile;
|
||||
process.env.ENABLE_HEALTH_CHECKER = 'false';
|
||||
process.env.NODE_ENV = 'test';
|
||||
|
||||
fs.writeFileSync(testServicesFile, '[]', 'utf8');
|
||||
fs.writeFileSync(testConfigFile, '{}', 'utf8');
|
||||
|
||||
const app = require('../server');
|
||||
|
||||
describe('Config Routes', () => {
|
||||
afterAll(() => {
|
||||
try { fs.unlinkSync(testServicesFile); } catch (e) { /* ignore */ }
|
||||
try { fs.unlinkSync(testConfigFile); } catch (e) { /* ignore */ }
|
||||
});
|
||||
|
||||
// Reset config file before each test to avoid leaking state
|
||||
beforeEach(() => {
|
||||
fs.writeFileSync(testConfigFile, '{}', 'utf8');
|
||||
});
|
||||
|
||||
describe('GET /api/config', () => {
|
||||
test('should return 200 with config object', async () => {
|
||||
const res = await request(app).get('/api/config');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(typeof res.body).toBe('object');
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/config', () => {
|
||||
test('should return 200 with success:true for valid config', async () => {
|
||||
const validConfig = {
|
||||
tld: 'sami',
|
||||
theme: 'dark',
|
||||
timezone: 'America/New_York',
|
||||
};
|
||||
|
||||
const res = await request(app)
|
||||
.post('/api/config')
|
||||
.send(validConfig);
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
|
||||
// Verify config was persisted
|
||||
const savedConfig = JSON.parse(fs.readFileSync(testConfigFile, 'utf8'));
|
||||
expect(savedConfig.tld).toBe('sami');
|
||||
expect(savedConfig.theme).toBe('dark');
|
||||
});
|
||||
|
||||
test('should return 400 for invalid config body', async () => {
|
||||
// Send a non-object body (string) which fails the typeof check
|
||||
const res = await request(app)
|
||||
.post('/api/config')
|
||||
.set('Content-Type', 'application/json')
|
||||
.send('"not an object"');
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
|
||||
test('should return 400 for config with invalid field values', async () => {
|
||||
const invalidConfig = {
|
||||
tld: 123, // tld must be a string
|
||||
dns: 'not-an-object', // dns must be an object
|
||||
};
|
||||
|
||||
const res = await request(app)
|
||||
.post('/api/config')
|
||||
.send(invalidConfig);
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
});
|
||||
|
||||
describe('DELETE /api/config', () => {
|
||||
test('should return 200 and reset config', async () => {
|
||||
// First save a config
|
||||
await request(app)
|
||||
.post('/api/config')
|
||||
.send({ tld: 'sami', theme: 'dark' });
|
||||
|
||||
// Then delete it
|
||||
const res = await request(app).delete('/api/config');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
|
||||
// Config file should no longer exist
|
||||
expect(fs.existsSync(testConfigFile)).toBe(false);
|
||||
});
|
||||
|
||||
test('should return 200 even when config does not exist', async () => {
|
||||
// Remove the config file first
|
||||
try { fs.unlinkSync(testConfigFile); } catch (e) { /* ignore */ }
|
||||
|
||||
const res = await request(app).delete('/api/config');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,73 +0,0 @@
|
||||
/**
|
||||
* Container Route Tests
|
||||
*
|
||||
* Tests Docker container management endpoints (start, stop, restart, discover)
|
||||
*/
|
||||
|
||||
const request = require('supertest');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
const testServicesFile = path.join(os.tmpdir(), `containers-services-${Date.now()}.json`);
|
||||
const testConfigFile = path.join(os.tmpdir(), `containers-config-${Date.now()}.json`);
|
||||
|
||||
process.env.SERVICES_FILE = testServicesFile;
|
||||
process.env.CONFIG_FILE = testConfigFile;
|
||||
process.env.ENABLE_HEALTH_CHECKER = 'false';
|
||||
process.env.NODE_ENV = 'test';
|
||||
|
||||
fs.writeFileSync(testServicesFile, '[]', 'utf8');
|
||||
fs.writeFileSync(testConfigFile, '{}', 'utf8');
|
||||
|
||||
const app = require('../server');
|
||||
|
||||
describe('Container Routes', () => {
|
||||
afterAll(() => {
|
||||
try { fs.unlinkSync(testServicesFile); } catch (e) { /* ignore */ }
|
||||
try { fs.unlinkSync(testConfigFile); } catch (e) { /* ignore */ }
|
||||
});
|
||||
|
||||
describe('POST /api/containers/:id/start', () => {
|
||||
test('should return error for invalid container ID', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/containers/nonexistent-container-id/start');
|
||||
|
||||
// Docker will reject the invalid container ID with an error
|
||||
expect(res.statusCode).toBeGreaterThanOrEqual(400);
|
||||
expect(res.body.success).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/containers/:id/stop', () => {
|
||||
test('should return error for invalid container ID', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/containers/nonexistent-container-id/stop');
|
||||
|
||||
// Docker will reject the invalid container ID with an error
|
||||
expect(res.statusCode).toBeGreaterThanOrEqual(400);
|
||||
expect(res.body.success).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/containers/:id/restart', () => {
|
||||
test('should return error for invalid container ID', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/containers/nonexistent-container-id/restart');
|
||||
|
||||
// Docker will reject the invalid container ID with an error
|
||||
expect(res.statusCode).toBeGreaterThanOrEqual(400);
|
||||
expect(res.body.success).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/containers/discover', () => {
|
||||
test('should return 200 with containers array', async () => {
|
||||
const res = await request(app).get('/api/containers/discover');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(Array.isArray(res.body.containers)).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,838 +0,0 @@
|
||||
// credential-manager depends on keychain-manager and crypto-utils (both singletons).
|
||||
// crypto-utils is already initialized via jest.setup.js env var.
|
||||
// keychain-manager may not have OS keychain available in test env.
|
||||
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
|
||||
const credentialManager = require('../credential-manager');
|
||||
|
||||
// Use a temp file for credentials in tests
|
||||
const TEMP_CREDS_FILE = path.join(os.tmpdir(), 'dashcaddy-test-creds.json');
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset singleton state
|
||||
credentialManager.cache.clear();
|
||||
// Clean up temp file
|
||||
if (fs.existsSync(TEMP_CREDS_FILE)) {
|
||||
fs.unlinkSync(TEMP_CREDS_FILE);
|
||||
}
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
if (fs.existsSync(TEMP_CREDS_FILE)) {
|
||||
fs.unlinkSync(TEMP_CREDS_FILE);
|
||||
}
|
||||
});
|
||||
|
||||
describe('store', () => {
|
||||
test('rejects invalid key (null)', async () => {
|
||||
const result = await credentialManager.store(null, 'value');
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
test('rejects invalid key (non-string)', async () => {
|
||||
const result = await credentialManager.store(123, 'value');
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
test('rejects invalid value (null)', async () => {
|
||||
const result = await credentialManager.store('key', null);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
test('rejects invalid value (non-string)', async () => {
|
||||
const result = await credentialManager.store('key', 123);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
test('stores credential and caches it', async () => {
|
||||
const result = await credentialManager.store('test.key', 'secret123');
|
||||
expect(result).toBe(true);
|
||||
expect(credentialManager.cache.get('test.key')).toBe('secret123');
|
||||
});
|
||||
|
||||
test('handles very long credential values', async () => {
|
||||
const longValue = 'x'.repeat(100000); // 100KB value
|
||||
const result = await credentialManager.store('long.value', longValue);
|
||||
expect(result).toBe(true);
|
||||
|
||||
const retrieved = await credentialManager.retrieve('long.value');
|
||||
expect(retrieved).toBe(longValue);
|
||||
});
|
||||
|
||||
test('handles special characters in keys', async () => {
|
||||
const specialKeys = [
|
||||
'key.with.dots',
|
||||
'key-with-dashes',
|
||||
'key_with_underscores',
|
||||
'key:with:colons',
|
||||
'key/with/slashes',
|
||||
];
|
||||
|
||||
for (const key of specialKeys) {
|
||||
const result = await credentialManager.store(key, 'value');
|
||||
expect(result).toBe(true);
|
||||
expect(await credentialManager.retrieve(key)).toBe('value');
|
||||
}
|
||||
});
|
||||
|
||||
test('handles special characters in values', async () => {
|
||||
const specialValues = [
|
||||
'password!@#$%^&*()',
|
||||
'token\nwith\nnewlines',
|
||||
'json{"key":"value"}',
|
||||
'unicode=<3D><><EFBFBD>=<3D><><EFBFBD>G<EFBFBD><47>',
|
||||
'quotes"and\'apostrophes',
|
||||
];
|
||||
|
||||
for (let i = 0; i < specialValues.length; i++) {
|
||||
const key = `special.${i}`;
|
||||
const result = await credentialManager.store(key, specialValues[i]);
|
||||
expect(result).toBe(true);
|
||||
expect(await credentialManager.retrieve(key)).toBe(specialValues[i]);
|
||||
}
|
||||
});
|
||||
|
||||
test('overwrites existing credential', async () => {
|
||||
await credentialManager.store('overwrite.key', 'original');
|
||||
expect(await credentialManager.retrieve('overwrite.key')).toBe('original');
|
||||
|
||||
await credentialManager.store('overwrite.key', 'updated');
|
||||
expect(await credentialManager.retrieve('overwrite.key')).toBe('updated');
|
||||
});
|
||||
});
|
||||
|
||||
describe('retrieve', () => {
|
||||
test('returns cached value when available', async () => {
|
||||
credentialManager.cache.set('cached.key', 'cached-value');
|
||||
const result = await credentialManager.retrieve('cached.key');
|
||||
expect(result).toBe('cached-value');
|
||||
});
|
||||
|
||||
test('returns null for non-existent key', async () => {
|
||||
const result = await credentialManager.retrieve('nonexistent');
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
test('handles empty string key', async () => {
|
||||
const result = await credentialManager.retrieve('');
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('store + retrieve round-trip', () => {
|
||||
test('retrieves what was stored', async () => {
|
||||
await credentialManager.store('roundtrip.key', 'my-secret');
|
||||
// Clear cache to force file read
|
||||
credentialManager.cache.clear();
|
||||
const result = await credentialManager.retrieve('roundtrip.key');
|
||||
expect(result).toBe('my-secret');
|
||||
});
|
||||
|
||||
test('handles binary-like data (base64)', async () => {
|
||||
const binaryData = Buffer.from('binary content').toString('base64');
|
||||
await credentialManager.store('binary.key', binaryData);
|
||||
credentialManager.cache.clear();
|
||||
const result = await credentialManager.retrieve('binary.key');
|
||||
expect(result).toBe(binaryData);
|
||||
});
|
||||
});
|
||||
|
||||
describe('delete', () => {
|
||||
test('removes from cache', async () => {
|
||||
await credentialManager.store('delete.key', 'value');
|
||||
expect(credentialManager.cache.has('delete.key')).toBe(true);
|
||||
await credentialManager.delete('delete.key');
|
||||
expect(credentialManager.cache.has('delete.key')).toBe(false);
|
||||
});
|
||||
|
||||
test('deleted credential cannot be retrieved', async () => {
|
||||
await credentialManager.store('delete2.key', 'value');
|
||||
await credentialManager.delete('delete2.key');
|
||||
credentialManager.cache.clear();
|
||||
const result = await credentialManager.retrieve('delete2.key');
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
test('deleting non-existent key does not throw', async () => {
|
||||
await expect(credentialManager.delete('nonexistent')).resolves.not.toThrow();
|
||||
});
|
||||
|
||||
test('multiple deletes are idempotent', async () => {
|
||||
await credentialManager.store('idempotent.key', 'value');
|
||||
await credentialManager.delete('idempotent.key');
|
||||
await credentialManager.delete('idempotent.key');
|
||||
await credentialManager.delete('idempotent.key');
|
||||
|
||||
expect(await credentialManager.retrieve('idempotent.key')).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('list', () => {
|
||||
test('returns array of credential keys', async () => {
|
||||
await credentialManager.store('list.a', 'val1');
|
||||
await credentialManager.store('list.b', 'val2');
|
||||
const keys = await credentialManager.list();
|
||||
expect(keys).toContain('list.a');
|
||||
expect(keys).toContain('list.b');
|
||||
});
|
||||
|
||||
test('returns empty array when no credentials', async () => {
|
||||
const keys = await credentialManager.list();
|
||||
expect(Array.isArray(keys)).toBe(true);
|
||||
});
|
||||
|
||||
test('does not include deleted keys', async () => {
|
||||
await credentialManager.store('list.deleted', 'value');
|
||||
await credentialManager.delete('list.deleted');
|
||||
|
||||
const keys = await credentialManager.list();
|
||||
expect(keys).not.toContain('list.deleted');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getMetadata', () => {
|
||||
test('returns metadata for existing key', async () => {
|
||||
await credentialManager.store('meta.key', 'val', { description: 'Test credential' });
|
||||
const meta = await credentialManager.getMetadata('meta.key');
|
||||
expect(meta).toEqual({ description: 'Test credential' });
|
||||
});
|
||||
|
||||
test('returns null for non-existent key', async () => {
|
||||
const meta = await credentialManager.getMetadata('nonexistent');
|
||||
expect(meta).toBeNull();
|
||||
});
|
||||
|
||||
test('handles metadata with multiple fields', async () => {
|
||||
const metadata = {
|
||||
description: 'API Key',
|
||||
service: 'GitHub',
|
||||
expiresAt: '2026-12-31',
|
||||
createdBy: 'admin',
|
||||
};
|
||||
|
||||
await credentialManager.store('meta.complex', 'value', metadata);
|
||||
const retrieved = await credentialManager.getMetadata('meta.complex');
|
||||
expect(retrieved).toEqual(metadata);
|
||||
});
|
||||
});
|
||||
|
||||
describe('exportBackup / importBackup', () => {
|
||||
test('export returns encrypted string', async () => {
|
||||
await credentialManager.store('backup.key', 'backup-value');
|
||||
const backup = await credentialManager.exportBackup();
|
||||
expect(typeof backup).toBe('string');
|
||||
expect(backup.split(':').length).toBe(3); // iv:authTag:ciphertext
|
||||
});
|
||||
|
||||
test('import restores credentials from backup', async () => {
|
||||
await credentialManager.store('backup.key', 'backup-value');
|
||||
const backup = await credentialManager.exportBackup();
|
||||
|
||||
// Clear everything
|
||||
await credentialManager.delete('backup.key');
|
||||
credentialManager.cache.clear();
|
||||
|
||||
// Import backup
|
||||
const result = await credentialManager.importBackup(backup);
|
||||
expect(result).toBe(true);
|
||||
|
||||
// Verify restored
|
||||
const keys = await credentialManager.list();
|
||||
expect(keys).toContain('backup.key');
|
||||
});
|
||||
|
||||
test('importBackup rejects unsupported version', async () => {
|
||||
const cryptoUtils = require('../crypto-utils');
|
||||
const badBackup = cryptoUtils.encrypt(JSON.stringify({ version: '99.0', credentials: {} }));
|
||||
const result = await credentialManager.importBackup(badBackup);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
test('export includes metadata', async () => {
|
||||
await credentialManager.store('backup.meta', 'value', { description: 'Metadata test' });
|
||||
const backup = await credentialManager.exportBackup();
|
||||
|
||||
await credentialManager.delete('backup.meta');
|
||||
await credentialManager.importBackup(backup);
|
||||
|
||||
const meta = await credentialManager.getMetadata('backup.meta');
|
||||
expect(meta).toHaveProperty('description', 'Metadata test');
|
||||
});
|
||||
|
||||
test('import does not corrupt existing credentials', async () => {
|
||||
await credentialManager.store('existing.key', 'existing-value');
|
||||
|
||||
// Create backup with different credential
|
||||
await credentialManager.store('backup.key', 'backup-value');
|
||||
const backup = await credentialManager.exportBackup();
|
||||
await credentialManager.delete('backup.key');
|
||||
|
||||
// Import should add backup.key without affecting existing.key
|
||||
await credentialManager.importBackup(backup);
|
||||
|
||||
expect(await credentialManager.retrieve('existing.key')).toBe('existing-value');
|
||||
expect(await credentialManager.retrieve('backup.key')).toBe('backup-value');
|
||||
});
|
||||
|
||||
test('handles empty backup', async () => {
|
||||
const cryptoUtils = require('../crypto-utils');
|
||||
const emptyBackup = cryptoUtils.encrypt(JSON.stringify({ version: '1.0', credentials: {} }));
|
||||
|
||||
const result = await credentialManager.importBackup(emptyBackup);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
test('handles large backup (stress test)', async () => {
|
||||
// Create 100 credentials
|
||||
for (let i = 0; i < 100; i++) {
|
||||
await credentialManager.store(`stress.${i}`, `value${i}`);
|
||||
}
|
||||
|
||||
const backup = await credentialManager.exportBackup();
|
||||
expect(backup.length).toBeGreaterThan(1000);
|
||||
|
||||
// Clear and restore
|
||||
for (let i = 0; i < 100; i++) {
|
||||
await credentialManager.delete(`stress.${i}`);
|
||||
}
|
||||
|
||||
const result = await credentialManager.importBackup(backup);
|
||||
expect(result).toBe(true);
|
||||
|
||||
const keys = await credentialManager.list();
|
||||
expect(keys.filter(k => k.startsWith('stress.')).length).toBe(100);
|
||||
});
|
||||
});
|
||||
|
||||
describe('migrateToEncrypted', () => {
|
||||
test('returns migration count', async () => {
|
||||
const result = await credentialManager.migrateToEncrypted();
|
||||
expect(result).toHaveProperty('migrated');
|
||||
expect(result).toHaveProperty('skipped');
|
||||
expect(result).toHaveProperty('total');
|
||||
});
|
||||
|
||||
test('migration is idempotent', async () => {
|
||||
const result1 = await credentialManager.migrateToEncrypted();
|
||||
const result2 = await credentialManager.migrateToEncrypted();
|
||||
|
||||
expect(result2.migrated).toBe(0); // Nothing left to migrate
|
||||
});
|
||||
});
|
||||
|
||||
describe('Concurrent Access', () => {
|
||||
test('handles concurrent writes to same key', async () => {
|
||||
const promises = [
|
||||
credentialManager.store('concurrent.key', 'value1'),
|
||||
credentialManager.store('concurrent.key', 'value2'),
|
||||
credentialManager.store('concurrent.key', 'value3'),
|
||||
];
|
||||
|
||||
await Promise.all(promises);
|
||||
|
||||
// One of them should have won
|
||||
const final = await credentialManager.retrieve('concurrent.key');
|
||||
expect(['value1', 'value2', 'value3']).toContain(final);
|
||||
});
|
||||
|
||||
test('handles concurrent writes to different keys', async () => {
|
||||
const promises = [];
|
||||
for (let i = 0; i < 10; i++) {
|
||||
promises.push(credentialManager.store(`concurrent.${i}`, `value${i}`));
|
||||
}
|
||||
|
||||
await Promise.all(promises);
|
||||
|
||||
// All should be stored
|
||||
for (let i = 0; i < 10; i++) {
|
||||
const value = await credentialManager.retrieve(`concurrent.${i}`);
|
||||
expect(value).toBe(`value${i}`);
|
||||
}
|
||||
});
|
||||
|
||||
test('handles concurrent read/write', async () => {
|
||||
await credentialManager.store('readwrite.key', 'initial');
|
||||
|
||||
const promises = [
|
||||
credentialManager.retrieve('readwrite.key'),
|
||||
credentialManager.store('readwrite.key', 'updated'),
|
||||
credentialManager.retrieve('readwrite.key'),
|
||||
];
|
||||
|
||||
const results = await Promise.all(promises);
|
||||
|
||||
// Should not throw or corrupt
|
||||
expect(results[0]).toBeTruthy();
|
||||
expect(results[1]).toBe(true);
|
||||
expect(results[2]).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
test('handles credential with empty string value', async () => {
|
||||
const result = await credentialManager.store('empty.value', '');
|
||||
expect(result).toBe(true);
|
||||
expect(await credentialManager.retrieve('empty.value')).toBe('');
|
||||
});
|
||||
|
||||
test('does not leak credentials in error messages', async () => {
|
||||
// This is a security test - errors should not contain credential values
|
||||
try {
|
||||
// Try to trigger an error condition
|
||||
await credentialManager.store('error.test', 'secret-password-123');
|
||||
// Force an error by corrupting internal state
|
||||
} catch (error) {
|
||||
expect(error.message).not.toContain('secret-password-123');
|
||||
}
|
||||
});
|
||||
|
||||
test('cache size does not grow indefinitely', async () => {
|
||||
// Store many credentials
|
||||
for (let i = 0; i < 1000; i++) {
|
||||
await credentialManager.store(`cache.${i}`, `value${i}`);
|
||||
}
|
||||
|
||||
// Cache should still work
|
||||
const result = await credentialManager.retrieve('cache.999');
|
||||
expect(result).toBe('value999');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Cache Behavior', () => {
|
||||
test('cache speeds up repeated retrievals', async () => {
|
||||
await credentialManager.store('cache.perf', 'value');
|
||||
|
||||
// First retrieval (from disk)
|
||||
const start1 = Date.now();
|
||||
await credentialManager.retrieve('cache.perf');
|
||||
const time1 = Date.now() - start1;
|
||||
|
||||
// Second retrieval (from cache)
|
||||
const start2 = Date.now();
|
||||
await credentialManager.retrieve('cache.perf');
|
||||
const time2 = Date.now() - start2;
|
||||
|
||||
// Cached should be faster (though this is not a guarantee in all test envs)
|
||||
expect(time2).toBeLessThanOrEqual(time1 + 5);
|
||||
});
|
||||
|
||||
test('cache invalidation on delete', async () => {
|
||||
await credentialManager.store('cache.delete', 'value');
|
||||
expect(credentialManager.cache.has('cache.delete')).toBe(true);
|
||||
|
||||
await credentialManager.delete('cache.delete');
|
||||
expect(credentialManager.cache.has('cache.delete')).toBe(false);
|
||||
});
|
||||
|
||||
test('cache invalidation on store', async () => {
|
||||
await credentialManager.store('cache.update', 'original');
|
||||
expect(credentialManager.cache.get('cache.update')).toBe('original');
|
||||
|
||||
await credentialManager.store('cache.update', 'updated');
|
||||
expect(credentialManager.cache.get('cache.update')).toBe('updated');
|
||||
});
|
||||
});
|
||||
|
||||
// ========== EXTENDED COVERAGE TESTS ==========
|
||||
// Additional tests for concurrency, edge cases, encryption, and recovery
|
||||
|
||||
describe('Credential Manager - Extended Coverage', () => {
|
||||
describe('Concurrent Access', () => {
|
||||
test('should handle concurrent store operations', async () => {
|
||||
// Store multiple credentials concurrently
|
||||
const promises = [];
|
||||
for (let i = 0; i < 10; i++) {
|
||||
promises.push(credentialManager.store(`concurrent.${i}`, `value${i}`));
|
||||
}
|
||||
|
||||
const results = await Promise.all(promises);
|
||||
|
||||
// All should succeed
|
||||
results.forEach(result => expect(result).toBe(true));
|
||||
|
||||
// All should be retrievable
|
||||
for (let i = 0; i < 10; i++) {
|
||||
const value = await credentialManager.retrieve(`concurrent.${i}`);
|
||||
expect(value).toBe(`value${i}`);
|
||||
}
|
||||
});
|
||||
|
||||
test('should handle concurrent retrieve operations', async () => {
|
||||
// Store a credential
|
||||
await credentialManager.store('shared.key', 'shared-value');
|
||||
|
||||
// Retrieve it concurrently multiple times
|
||||
const promises = [];
|
||||
for (let i = 0; i < 20; i++) {
|
||||
promises.push(credentialManager.retrieve('shared.key'));
|
||||
}
|
||||
|
||||
const results = await Promise.all(promises);
|
||||
|
||||
// All should return the same value
|
||||
results.forEach(result => expect(result).toBe('shared-value'));
|
||||
});
|
||||
|
||||
test('should handle concurrent store/retrieve mix', async () => {
|
||||
const operations = [];
|
||||
|
||||
// Mix of stores and retrieves
|
||||
for (let i = 0; i < 5; i++) {
|
||||
operations.push(credentialManager.store(`mix.${i}`, `value${i}`));
|
||||
operations.push(credentialManager.retrieve(`mix.${i}`));
|
||||
}
|
||||
|
||||
// Should not throw
|
||||
await expect(Promise.all(operations)).resolves.toBeDefined();
|
||||
});
|
||||
|
||||
test('should handle concurrent delete operations safely', async () => {
|
||||
await credentialManager.store('delete.concurrent', 'value');
|
||||
|
||||
// Try to delete the same key concurrently
|
||||
const promises = [
|
||||
credentialManager.delete('delete.concurrent'),
|
||||
credentialManager.delete('delete.concurrent'),
|
||||
credentialManager.delete('delete.concurrent'),
|
||||
];
|
||||
|
||||
// Should not throw
|
||||
await expect(Promise.all(promises)).resolves.toBeDefined();
|
||||
|
||||
// Key should be gone
|
||||
const value = await credentialManager.retrieve('delete.concurrent');
|
||||
expect(value).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Special Characters & Edge Cases', () => {
|
||||
test('should handle very long credential values', async () => {
|
||||
const longValue = 'x'.repeat(10000);
|
||||
|
||||
const stored = await credentialManager.store('long.value', longValue);
|
||||
expect(stored).toBe(true);
|
||||
|
||||
credentialManager.cache.clear();
|
||||
const retrieved = await credentialManager.retrieve('long.value');
|
||||
expect(retrieved).toBe(longValue);
|
||||
});
|
||||
|
||||
test('should handle credential values with special characters', async () => {
|
||||
const specialChars = '!@#$%^&*()_+-=[]{}|;:\'",.<>?/~`\n\r\t\\';
|
||||
|
||||
const stored = await credentialManager.store('special.chars', specialChars);
|
||||
expect(stored).toBe(true);
|
||||
|
||||
credentialManager.cache.clear();
|
||||
const retrieved = await credentialManager.retrieve('special.chars');
|
||||
expect(retrieved).toBe(specialChars);
|
||||
});
|
||||
|
||||
test('should handle unicode characters', async () => {
|
||||
const unicode = 'S+<2B>s<EFBFBD>+S+<2B>t<EFBFBD><74> =<3D><><EFBFBD> +<2B>+<2B>+<2B>+<2B>+<2B> +<2B>+<2B>+<2B>+<2B>+<2B>+<2B>';
|
||||
|
||||
const stored = await credentialManager.store('unicode.key', unicode);
|
||||
expect(stored).toBe(true);
|
||||
|
||||
credentialManager.cache.clear();
|
||||
const retrieved = await credentialManager.retrieve('unicode.key');
|
||||
expect(retrieved).toBe(unicode);
|
||||
});
|
||||
|
||||
test('should handle JSON-like strings', async () => {
|
||||
const jsonString = '{"nested": {"key": "value"}, "array": [1,2,3]}';
|
||||
|
||||
const stored = await credentialManager.store('json.string', jsonString);
|
||||
expect(stored).toBe(true);
|
||||
|
||||
credentialManager.cache.clear();
|
||||
const retrieved = await credentialManager.retrieve('json.string');
|
||||
expect(retrieved).toBe(jsonString);
|
||||
});
|
||||
|
||||
test('should handle empty string values', async () => {
|
||||
const stored = await credentialManager.store('empty.string', '');
|
||||
expect(stored).toBe(true);
|
||||
|
||||
credentialManager.cache.clear();
|
||||
const retrieved = await credentialManager.retrieve('empty.string');
|
||||
expect(retrieved).toBe('');
|
||||
});
|
||||
|
||||
test('should handle whitespace-only values', async () => {
|
||||
const whitespace = ' \n\t ';
|
||||
|
||||
const stored = await credentialManager.store('whitespace.key', whitespace);
|
||||
expect(stored).toBe(true);
|
||||
|
||||
credentialManager.cache.clear();
|
||||
const retrieved = await credentialManager.retrieve('whitespace.key');
|
||||
expect(retrieved).toBe(whitespace);
|
||||
});
|
||||
|
||||
test('should handle keys with dots and dashes', async () => {
|
||||
const complexKey = 'my-app.production.database.password';
|
||||
|
||||
const stored = await credentialManager.store(complexKey, 'secret123');
|
||||
expect(stored).toBe(true);
|
||||
|
||||
const retrieved = await credentialManager.retrieve(complexKey);
|
||||
expect(retrieved).toBe('secret123');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Encryption & Security', () => {
|
||||
test('should encrypt credentials before storage', async () => {
|
||||
await credentialManager.store('encrypt.test', 'plaintext-secret');
|
||||
|
||||
// Try to read the file directly
|
||||
// If properly encrypted, the plaintext should not appear in the file
|
||||
// (This is a basic check - actual encryption is tested in crypto-utils.test.js)
|
||||
const keys = await credentialManager.list();
|
||||
expect(keys).toContain('encrypt.test');
|
||||
});
|
||||
|
||||
test('should not leak credentials in error messages', async () => {
|
||||
// Store a credential
|
||||
await credentialManager.store('sensitive.key', 'super-secret-password');
|
||||
|
||||
// The cache should contain the value, but stringifying shouldn't expose it
|
||||
const cacheString = JSON.stringify(credentialManager.cache);
|
||||
|
||||
// This is implementation-dependent, but generally caches are Map objects
|
||||
// which stringify to empty objects
|
||||
expect(cacheString).not.toContain('super-secret-password');
|
||||
});
|
||||
|
||||
test('should handle corrupted credential data gracefully', async () => {
|
||||
// This would require mocking file I/O or crypto-utils
|
||||
// For now, test that invalid keys return null
|
||||
const result = await credentialManager.retrieve('definitely.not.real');
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Metadata Operations', () => {
|
||||
test('should store and retrieve metadata', async () => {
|
||||
const metadata = {
|
||||
description: 'Production database password',
|
||||
createdAt: new Date().toISOString(),
|
||||
owner: 'admin',
|
||||
tags: ['production', 'database'],
|
||||
};
|
||||
|
||||
await credentialManager.store('meta.full', 'value', metadata);
|
||||
|
||||
const retrieved = await credentialManager.getMetadata('meta.full');
|
||||
expect(retrieved).toEqual(metadata);
|
||||
});
|
||||
|
||||
test('should allow updating metadata without changing value', async () => {
|
||||
await credentialManager.store('meta.update', 'original-value', { version: 1 });
|
||||
|
||||
// Update metadata
|
||||
await credentialManager.store('meta.update', 'original-value', { version: 2, updated: true });
|
||||
|
||||
const meta = await credentialManager.getMetadata('meta.update');
|
||||
expect(meta.version).toBe(2);
|
||||
expect(meta.updated).toBe(true);
|
||||
|
||||
// Value should be unchanged
|
||||
const value = await credentialManager.retrieve('meta.update');
|
||||
expect(value).toBe('original-value');
|
||||
});
|
||||
|
||||
test('should handle metadata with special characters', async () => {
|
||||
const metadata = {
|
||||
description: 'Test with "quotes" and \'apostrophes\'',
|
||||
notes: 'Line 1\nLine 2\tTabbed',
|
||||
};
|
||||
|
||||
await credentialManager.store('meta.special', 'value', metadata);
|
||||
|
||||
const retrieved = await credentialManager.getMetadata('meta.special');
|
||||
expect(retrieved.description).toBe(metadata.description);
|
||||
expect(retrieved.notes).toBe(metadata.notes);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Backup & Restore', () => {
|
||||
test('should preserve metadata in backup', async () => {
|
||||
const metadata = { description: 'Important credential', priority: 'high' };
|
||||
await credentialManager.store('backup.meta', 'value123', metadata);
|
||||
|
||||
const backup = await credentialManager.exportBackup();
|
||||
|
||||
// Clear everything
|
||||
await credentialManager.delete('backup.meta');
|
||||
credentialManager.cache.clear();
|
||||
|
||||
// Restore
|
||||
await credentialManager.importBackup(backup);
|
||||
|
||||
// Check metadata preserved
|
||||
const restoredMeta = await credentialManager.getMetadata('backup.meta');
|
||||
expect(restoredMeta).toEqual(metadata);
|
||||
});
|
||||
|
||||
test('should handle backup of empty credential store', async () => {
|
||||
const backup = await credentialManager.exportBackup();
|
||||
expect(typeof backup).toBe('string');
|
||||
expect(backup.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test('should handle importing same backup multiple times', async () => {
|
||||
await credentialManager.store('backup.repeat', 'value1');
|
||||
const backup = await credentialManager.exportBackup();
|
||||
|
||||
// Import once
|
||||
await credentialManager.importBackup(backup);
|
||||
|
||||
// Import again
|
||||
const result = await credentialManager.importBackup(backup);
|
||||
expect(result).toBe(true);
|
||||
|
||||
// Should not cause duplicates or errors
|
||||
const keys = await credentialManager.list();
|
||||
const count = keys.filter(k => k === 'backup.repeat').length;
|
||||
expect(count).toBe(1);
|
||||
});
|
||||
|
||||
test('should handle corrupted backup data gracefully', async () => {
|
||||
const result = await credentialManager.importBackup('corrupted:data:here');
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
test('should handle empty backup string', async () => {
|
||||
const result = await credentialManager.importBackup('');
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
test('should handle backup with invalid JSON', async () => {
|
||||
const cryptoUtils = require('../crypto-utils');
|
||||
const invalidBackup = cryptoUtils.encrypt('{ invalid json }');
|
||||
|
||||
const result = await credentialManager.importBackup(invalidBackup);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Cache Behavior', () => {
|
||||
test('should cache retrieved values', async () => {
|
||||
await credentialManager.store('cache.test', 'cached-value');
|
||||
|
||||
// First retrieval
|
||||
await credentialManager.retrieve('cache.test');
|
||||
expect(credentialManager.cache.has('cache.test')).toBe(true);
|
||||
|
||||
// Second retrieval should use cache
|
||||
const cached = await credentialManager.retrieve('cache.test');
|
||||
expect(cached).toBe('cached-value');
|
||||
});
|
||||
|
||||
test('should invalidate cache on delete', async () => {
|
||||
await credentialManager.store('cache.delete', 'value');
|
||||
await credentialManager.retrieve('cache.delete');
|
||||
|
||||
expect(credentialManager.cache.has('cache.delete')).toBe(true);
|
||||
|
||||
await credentialManager.delete('cache.delete');
|
||||
expect(credentialManager.cache.has('cache.delete')).toBe(false);
|
||||
});
|
||||
|
||||
test('should invalidate cache on store update', async () => {
|
||||
await credentialManager.store('cache.update', 'original');
|
||||
await credentialManager.retrieve('cache.update');
|
||||
|
||||
// Update the credential
|
||||
await credentialManager.store('cache.update', 'updated');
|
||||
|
||||
// Cache should have new value
|
||||
expect(credentialManager.cache.get('cache.update')).toBe('updated');
|
||||
|
||||
// Retrieval should return updated value
|
||||
credentialManager.cache.clear();
|
||||
const retrieved = await credentialManager.retrieve('cache.update');
|
||||
expect(retrieved).toBe('updated');
|
||||
});
|
||||
|
||||
test('should handle cache clearing during operations', async () => {
|
||||
await credentialManager.store('cache.clear', 'value1');
|
||||
|
||||
// Clear cache manually
|
||||
credentialManager.cache.clear();
|
||||
|
||||
// Should still be able to retrieve from storage
|
||||
const retrieved = await credentialManager.retrieve('cache.clear');
|
||||
expect(retrieved).toBe('value1');
|
||||
});
|
||||
});
|
||||
|
||||
describe('List Operations', () => {
|
||||
test('should list credentials in sorted order', async () => {
|
||||
await credentialManager.store('zebra', 'val1');
|
||||
await credentialManager.store('alpha', 'val2');
|
||||
await credentialManager.store('beta', 'val3');
|
||||
|
||||
const keys = await credentialManager.list();
|
||||
|
||||
// Should be sorted
|
||||
const sortedKeys = [...keys].sort();
|
||||
expect(keys).toEqual(sortedKeys);
|
||||
});
|
||||
|
||||
test('should not include deleted credentials in list', async () => {
|
||||
await credentialManager.store('list.keep', 'val1');
|
||||
await credentialManager.store('list.delete', 'val2');
|
||||
|
||||
await credentialManager.delete('list.delete');
|
||||
|
||||
const keys = await credentialManager.list();
|
||||
expect(keys).toContain('list.keep');
|
||||
expect(keys).not.toContain('list.delete');
|
||||
});
|
||||
|
||||
test('should return unique keys only', async () => {
|
||||
await credentialManager.store('unique.key', 'val1');
|
||||
await credentialManager.store('unique.key', 'val2'); // Update
|
||||
|
||||
const keys = await credentialManager.list();
|
||||
const uniqueCount = keys.filter(k => k === 'unique.key').length;
|
||||
expect(uniqueCount).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Error Handling & Recovery', () => {
|
||||
test('should handle retrieve errors gracefully', async () => {
|
||||
// Try to retrieve with invalid key types
|
||||
const result1 = await credentialManager.retrieve(null);
|
||||
const result2 = await credentialManager.retrieve(undefined);
|
||||
const result3 = await credentialManager.retrieve('');
|
||||
|
||||
expect(result1).toBeNull();
|
||||
expect(result2).toBeNull();
|
||||
expect(result3).toBeNull();
|
||||
});
|
||||
|
||||
test('should handle delete of non-existent credential', async () => {
|
||||
// Should not throw
|
||||
await expect(credentialManager.delete('nonexistent.key')).resolves.toBeDefined();
|
||||
});
|
||||
|
||||
test('should recover from partial operations', async () => {
|
||||
// Store a credential
|
||||
await credentialManager.store('recover.test', 'original');
|
||||
|
||||
// Try to store invalid data
|
||||
await credentialManager.store('recover.test', null);
|
||||
|
||||
// Original should still be intact
|
||||
credentialManager.cache.clear();
|
||||
const retrieved = await credentialManager.retrieve('recover.test');
|
||||
expect(retrieved).toBe('original');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,54 +0,0 @@
|
||||
/**
|
||||
* Credentials Route Tests
|
||||
*
|
||||
* Tests credential listing and encryption key rotation endpoints
|
||||
*/
|
||||
|
||||
const request = require('supertest');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
const testServicesFile = path.join(os.tmpdir(), `credentials-services-${Date.now()}.json`);
|
||||
const testConfigFile = path.join(os.tmpdir(), `credentials-config-${Date.now()}.json`);
|
||||
|
||||
process.env.SERVICES_FILE = testServicesFile;
|
||||
process.env.CONFIG_FILE = testConfigFile;
|
||||
process.env.ENABLE_HEALTH_CHECKER = 'false';
|
||||
process.env.NODE_ENV = 'test';
|
||||
|
||||
fs.writeFileSync(testServicesFile, '[]', 'utf8');
|
||||
fs.writeFileSync(testConfigFile, '{}', 'utf8');
|
||||
|
||||
const app = require('../server');
|
||||
|
||||
describe('Credentials Routes', () => {
|
||||
afterAll(() => {
|
||||
try { fs.unlinkSync(testServicesFile); } catch (e) { /* ignore */ }
|
||||
try { fs.unlinkSync(testConfigFile); } catch (e) { /* ignore */ }
|
||||
});
|
||||
|
||||
describe('GET /api/credentials/list', () => {
|
||||
test('should return 200 with credentials array', async () => {
|
||||
const res = await request(app).get('/api/credentials/list');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(Array.isArray(res.body.credentials)).toBe(true);
|
||||
expect(typeof res.body.count).toBe('number');
|
||||
expect(res.body.count).toBe(res.body.credentials.length);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/credentials/rotate-key', () => {
|
||||
test('should return 200 with success true', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/credentials/rotate-key')
|
||||
.send({});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body).toHaveProperty('message');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,290 +0,0 @@
|
||||
// crypto-utils exports a module that calls loadOrCreateKey() at load time (line 263).
|
||||
// The jest.setup.js sets DASHCADDY_ENCRYPTION_KEY env var so it uses a deterministic key.
|
||||
|
||||
const cryptoUtils = require('../crypto-utils');
|
||||
|
||||
describe('encrypt / decrypt', () => {
|
||||
test('round-trips a string', () => {
|
||||
const plaintext = 'hello world';
|
||||
const encrypted = cryptoUtils.encrypt(plaintext);
|
||||
const decrypted = cryptoUtils.decrypt(encrypted);
|
||||
expect(decrypted).toBe(plaintext);
|
||||
});
|
||||
|
||||
test('round-trips an object via JSON', () => {
|
||||
const obj = { user: 'admin', pass: 'secret123' };
|
||||
const encrypted = cryptoUtils.encrypt(obj);
|
||||
const decrypted = JSON.parse(cryptoUtils.decrypt(encrypted));
|
||||
expect(decrypted).toEqual(obj);
|
||||
});
|
||||
|
||||
test('encrypted output differs from plaintext', () => {
|
||||
const plaintext = 'sensitive data';
|
||||
const encrypted = cryptoUtils.encrypt(plaintext);
|
||||
expect(encrypted).not.toBe(plaintext);
|
||||
});
|
||||
|
||||
test('encrypted format is iv:authTag:ciphertext (3 colon-separated parts)', () => {
|
||||
const encrypted = cryptoUtils.encrypt('test');
|
||||
const parts = encrypted.split(':');
|
||||
expect(parts.length).toBe(3);
|
||||
});
|
||||
|
||||
test('each encryption produces different output (random IV)', () => {
|
||||
const plaintext = 'same input';
|
||||
const enc1 = cryptoUtils.encrypt(plaintext);
|
||||
const enc2 = cryptoUtils.encrypt(plaintext);
|
||||
expect(enc1).not.toBe(enc2);
|
||||
// But both decrypt to same value
|
||||
expect(cryptoUtils.decrypt(enc1)).toBe(plaintext);
|
||||
expect(cryptoUtils.decrypt(enc2)).toBe(plaintext);
|
||||
});
|
||||
|
||||
test('throws on tampered ciphertext', () => {
|
||||
const encrypted = cryptoUtils.encrypt('test');
|
||||
const parts = encrypted.split(':');
|
||||
parts[2] = `AAAA${ parts[2].slice(4)}`; // tamper with ciphertext
|
||||
expect(() => cryptoUtils.decrypt(parts.join(':'))).toThrow();
|
||||
});
|
||||
|
||||
test('throws on tampered authTag', () => {
|
||||
const encrypted = cryptoUtils.encrypt('test');
|
||||
const parts = encrypted.split(':');
|
||||
parts[1] = `AAAA${ parts[1].slice(4)}`; // tamper with auth tag
|
||||
expect(() => cryptoUtils.decrypt(parts.join(':'))).toThrow();
|
||||
});
|
||||
|
||||
test('throws on invalid encrypted format (wrong number of parts)', () => {
|
||||
expect(() => cryptoUtils.decrypt('only:two')).toThrow('Invalid encrypted data format');
|
||||
expect(() => cryptoUtils.decrypt('just-one')).toThrow('Invalid encrypted data format');
|
||||
});
|
||||
|
||||
test('handles empty string', () => {
|
||||
const encrypted = cryptoUtils.encrypt('');
|
||||
expect(cryptoUtils.decrypt(encrypted)).toBe('');
|
||||
});
|
||||
|
||||
test('handles special characters', () => {
|
||||
const special = 'p@$$w0rd!<>&"\';DROP TABLE--';
|
||||
expect(cryptoUtils.decrypt(cryptoUtils.encrypt(special))).toBe(special);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isEncrypted', () => {
|
||||
test('returns true for encrypted strings', () => {
|
||||
const encrypted = cryptoUtils.encrypt('test');
|
||||
expect(cryptoUtils.isEncrypted(encrypted)).toBe(true);
|
||||
});
|
||||
|
||||
test('returns false for plain strings', () => {
|
||||
expect(cryptoUtils.isEncrypted('hello world')).toBe(false);
|
||||
});
|
||||
|
||||
test('returns false for non-string input', () => {
|
||||
expect(cryptoUtils.isEncrypted(123)).toBe(false);
|
||||
expect(cryptoUtils.isEncrypted(null)).toBe(false);
|
||||
expect(cryptoUtils.isEncrypted(undefined)).toBe(false);
|
||||
});
|
||||
|
||||
test('returns false for string with wrong number of colons', () => {
|
||||
expect(cryptoUtils.isEncrypted('one:two')).toBe(false);
|
||||
expect(cryptoUtils.isEncrypted('one:two:three:four')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('encryptFields', () => {
|
||||
test('encrypts only specified fields', () => {
|
||||
const obj = { username: 'admin', password: 'secret', role: 'user' };
|
||||
const result = cryptoUtils.encryptFields(obj, ['password']);
|
||||
expect(result.username).toBe('admin');
|
||||
expect(result.role).toBe('user');
|
||||
expect(result.password).not.toBe('secret');
|
||||
expect(cryptoUtils.isEncrypted(result.password)).toBe(true);
|
||||
});
|
||||
|
||||
test('leaves non-specified fields unchanged', () => {
|
||||
const obj = { a: '1', b: '2', c: '3' };
|
||||
const result = cryptoUtils.encryptFields(obj, ['a']);
|
||||
expect(result.b).toBe('2');
|
||||
expect(result.c).toBe('3');
|
||||
});
|
||||
|
||||
test('adds _encrypted marker', () => {
|
||||
const result = cryptoUtils.encryptFields({ x: 'y' }, ['x']);
|
||||
expect(result._encrypted).toBe(true);
|
||||
});
|
||||
|
||||
test('adds _encryptedFields list', () => {
|
||||
const result = cryptoUtils.encryptFields({ x: 'y' }, ['x']);
|
||||
expect(result._encryptedFields).toEqual(['x']);
|
||||
});
|
||||
|
||||
test('does not double-encrypt already-encrypted fields', () => {
|
||||
const obj = { password: 'secret' };
|
||||
const first = cryptoUtils.encryptFields(obj, ['password']);
|
||||
const second = cryptoUtils.encryptFields(first, ['password']);
|
||||
// Should still be decryptable to original
|
||||
expect(cryptoUtils.decrypt(second.password)).toBe('secret');
|
||||
});
|
||||
|
||||
test('skips null/undefined fields', () => {
|
||||
const obj = { a: null, b: undefined, c: 'val' };
|
||||
const result = cryptoUtils.encryptFields(obj, ['a', 'b', 'c']);
|
||||
expect(result.a).toBeNull();
|
||||
expect(result.b).toBeUndefined();
|
||||
expect(cryptoUtils.isEncrypted(result.c)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('decryptFields', () => {
|
||||
test('decrypts specified fields', () => {
|
||||
const encrypted = cryptoUtils.encryptFields({ password: 'secret', name: 'test' }, ['password']);
|
||||
const decrypted = cryptoUtils.decryptFields(encrypted, ['password']);
|
||||
expect(decrypted.password).toBe('secret');
|
||||
expect(decrypted.name).toBe('test');
|
||||
});
|
||||
|
||||
test('returns object without encryption markers', () => {
|
||||
const encrypted = cryptoUtils.encryptFields({ x: 'y' }, ['x']);
|
||||
const decrypted = cryptoUtils.decryptFields(encrypted);
|
||||
expect(decrypted._encrypted).toBeUndefined();
|
||||
expect(decrypted._encryptedFields).toBeUndefined();
|
||||
});
|
||||
|
||||
test('returns object as-is when _encrypted is false/absent', () => {
|
||||
const obj = { a: '1', b: '2' };
|
||||
const result = cryptoUtils.decryptFields(obj);
|
||||
expect(result).toEqual(obj);
|
||||
});
|
||||
|
||||
test('uses _encryptedFields when fields param is null', () => {
|
||||
const encrypted = cryptoUtils.encryptFields({ password: 'secret', token: 'abc' }, ['password', 'token']);
|
||||
const decrypted = cryptoUtils.decryptFields(encrypted);
|
||||
expect(decrypted.password).toBe('secret');
|
||||
expect(decrypted.token).toBe('abc');
|
||||
});
|
||||
});
|
||||
|
||||
describe('encryptFields + decryptFields round-trip', () => {
|
||||
test('full round-trip preserves all field values', () => {
|
||||
const original = { user: 'admin', pass: 'p@ss', apiKey: 'key123', role: 'editor' };
|
||||
const fields = ['pass', 'apiKey'];
|
||||
const encrypted = cryptoUtils.encryptFields(original, fields);
|
||||
const decrypted = cryptoUtils.decryptFields(encrypted, fields);
|
||||
expect(decrypted.user).toBe(original.user);
|
||||
expect(decrypted.pass).toBe(original.pass);
|
||||
expect(decrypted.apiKey).toBe(original.apiKey);
|
||||
expect(decrypted.role).toBe(original.role);
|
||||
});
|
||||
});
|
||||
|
||||
describe('migrateToEncrypted', () => {
|
||||
test('encrypts plaintext credentials', () => {
|
||||
const plain = { password: 'secret', token: 'abc123' };
|
||||
const result = cryptoUtils.migrateToEncrypted(plain, ['password', 'token']);
|
||||
expect(result._encrypted).toBe(true);
|
||||
expect(cryptoUtils.isEncrypted(result.password)).toBe(true);
|
||||
});
|
||||
|
||||
test('returns already-encrypted credentials unchanged', () => {
|
||||
const encrypted = cryptoUtils.encryptFields({ password: 'secret' }, ['password']);
|
||||
const result = cryptoUtils.migrateToEncrypted(encrypted, ['password']);
|
||||
expect(result).toEqual(encrypted);
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadOrCreateKey', () => {
|
||||
test('returns a buffer', () => {
|
||||
const key = cryptoUtils.loadOrCreateKey();
|
||||
expect(Buffer.isBuffer(key)).toBe(true);
|
||||
});
|
||||
|
||||
test('returns 32-byte key', () => {
|
||||
const key = cryptoUtils.loadOrCreateKey();
|
||||
expect(key.length).toBe(32);
|
||||
});
|
||||
|
||||
test('returns cached key on subsequent calls', () => {
|
||||
const key1 = cryptoUtils.loadOrCreateKey();
|
||||
const key2 = cryptoUtils.loadOrCreateKey();
|
||||
expect(key1).toBe(key2); // same reference (cached)
|
||||
});
|
||||
});
|
||||
|
||||
describe('readEncryptedFile', () => {
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
|
||||
test('returns null when file does not exist', () => {
|
||||
const result = cryptoUtils.readEncryptedFile('/nonexistent/file.json');
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
test('reads and returns plaintext JSON file', () => {
|
||||
const tmpFile = path.join(os.tmpdir(), 'dashcaddy-test-plain.json');
|
||||
fs.writeFileSync(tmpFile, JSON.stringify({ username: 'admin', password: 'plain' }));
|
||||
try {
|
||||
const result = cryptoUtils.readEncryptedFile(tmpFile);
|
||||
expect(result.username).toBe('admin');
|
||||
expect(result.password).toBe('plain');
|
||||
} finally {
|
||||
fs.unlinkSync(tmpFile);
|
||||
}
|
||||
});
|
||||
|
||||
test('reads and decrypts encrypted JSON file', () => {
|
||||
const tmpFile = path.join(os.tmpdir(), 'dashcaddy-test-encrypted.json');
|
||||
const data = { username: 'admin', password: 'secret' };
|
||||
cryptoUtils.writeEncryptedFile(tmpFile, data, ['password']);
|
||||
try {
|
||||
const result = cryptoUtils.readEncryptedFile(tmpFile, ['password']);
|
||||
expect(result.username).toBe('admin');
|
||||
expect(result.password).toBe('secret');
|
||||
} finally {
|
||||
fs.unlinkSync(tmpFile);
|
||||
}
|
||||
});
|
||||
|
||||
test('returns null on JSON parse error', () => {
|
||||
const tmpFile = path.join(os.tmpdir(), 'dashcaddy-test-bad.json');
|
||||
fs.writeFileSync(tmpFile, 'not json at all {{{');
|
||||
try {
|
||||
const result = cryptoUtils.readEncryptedFile(tmpFile);
|
||||
expect(result).toBeNull();
|
||||
} finally {
|
||||
fs.unlinkSync(tmpFile);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('writeEncryptedFile', () => {
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
|
||||
test('writes valid JSON to disk', () => {
|
||||
const tmpFile = path.join(os.tmpdir(), 'dashcaddy-test-write.json');
|
||||
cryptoUtils.writeEncryptedFile(tmpFile, { user: 'test', token: 'abc' }, ['token']);
|
||||
try {
|
||||
const content = JSON.parse(fs.readFileSync(tmpFile, 'utf8'));
|
||||
expect(content._encrypted).toBe(true);
|
||||
expect(content.user).toBe('test');
|
||||
expect(cryptoUtils.isEncrypted(content.token)).toBe(true);
|
||||
} finally {
|
||||
fs.unlinkSync(tmpFile);
|
||||
}
|
||||
});
|
||||
|
||||
test('encrypts specified fields', () => {
|
||||
const tmpFile = path.join(os.tmpdir(), 'dashcaddy-test-write2.json');
|
||||
cryptoUtils.writeEncryptedFile(tmpFile, { a: 'plain', b: 'secret' }, ['b']);
|
||||
try {
|
||||
const content = JSON.parse(fs.readFileSync(tmpFile, 'utf8'));
|
||||
expect(content.a).toBe('plain');
|
||||
expect(content.b).not.toBe('secret');
|
||||
} finally {
|
||||
fs.unlinkSync(tmpFile);
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -1,142 +0,0 @@
|
||||
/**
|
||||
* DNS Route Tests
|
||||
*
|
||||
* Tests DNS record management endpoints (create, delete, resolve)
|
||||
* Note: All DNS routes require a token. We pass token='test-token' to bypass
|
||||
* credential lookup (requireDnsToken returns providedToken if truthy).
|
||||
*/
|
||||
|
||||
const request = require('supertest');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
const testServicesFile = path.join(os.tmpdir(), `dns-services-${Date.now()}.json`);
|
||||
const testConfigFile = path.join(os.tmpdir(), `dns-config-${Date.now()}.json`);
|
||||
|
||||
process.env.SERVICES_FILE = testServicesFile;
|
||||
process.env.CONFIG_FILE = testConfigFile;
|
||||
process.env.ENABLE_HEALTH_CHECKER = 'false';
|
||||
process.env.NODE_ENV = 'test';
|
||||
|
||||
fs.writeFileSync(testServicesFile, '[]', 'utf8');
|
||||
fs.writeFileSync(testConfigFile, '{}', 'utf8');
|
||||
|
||||
const app = require('../server');
|
||||
|
||||
describe('DNS Routes', () => {
|
||||
afterAll(() => {
|
||||
try { fs.unlinkSync(testServicesFile); } catch (e) { /* ignore */ }
|
||||
try { fs.unlinkSync(testConfigFile); } catch (e) { /* ignore */ }
|
||||
});
|
||||
|
||||
describe('POST /api/dns/record', () => {
|
||||
test('should reject missing domain', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/record')
|
||||
.send({ ip: '192.168.1.1', token: 'test-token' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
expect(res.body.success).toBe(false);
|
||||
expect(res.body.error).toContain('domain');
|
||||
});
|
||||
|
||||
test('should reject missing ip', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/record')
|
||||
.send({ domain: 'test.sami', token: 'test-token' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
expect(res.body.success).toBe(false);
|
||||
});
|
||||
|
||||
test('should reject invalid domain format', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/record')
|
||||
.send({ domain: '!!!invalid!!!', ip: '192.168.1.1', token: 'test-token' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
expect(res.body.error).toContain('Invalid domain');
|
||||
});
|
||||
|
||||
test('should reject invalid IP address', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/record')
|
||||
.send({ domain: 'test.sami', ip: 'not-an-ip', token: 'test-token' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
expect(res.body.error).toContain('Invalid IP');
|
||||
});
|
||||
|
||||
test('should reject invalid TTL', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/record')
|
||||
.send({ domain: 'test.sami', ip: '192.168.1.1', ttl: 10, token: 'test-token' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
expect(res.body.error).toContain('TTL');
|
||||
});
|
||||
});
|
||||
|
||||
describe('DELETE /api/dns/record', () => {
|
||||
test('should reject missing domain', async () => {
|
||||
const res = await request(app)
|
||||
.delete('/api/dns/record')
|
||||
.query({ token: 'test-token' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
expect(res.body.error).toContain('domain');
|
||||
});
|
||||
|
||||
test('should reject invalid domain format', async () => {
|
||||
const res = await request(app)
|
||||
.delete('/api/dns/record')
|
||||
.query({ domain: '!!!bad!!!', token: 'test-token' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
|
||||
test('should reject invalid record type', async () => {
|
||||
const res = await request(app)
|
||||
.delete('/api/dns/record')
|
||||
.query({ domain: 'test.sami', type: 'INVALID', token: 'test-token' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
expect(res.body.error).toContain('Invalid DNS record type');
|
||||
});
|
||||
|
||||
test('should reject invalid IP address in query', async () => {
|
||||
const res = await request(app)
|
||||
.delete('/api/dns/record')
|
||||
.query({ domain: 'test.sami', ipAddress: 'not-ip', token: 'test-token' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
|
||||
test('should reject invalid server address', async () => {
|
||||
const res = await request(app)
|
||||
.delete('/api/dns/record')
|
||||
.query({ domain: 'test.sami', server: 'not-ip', token: 'test-token' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/dns/resolve', () => {
|
||||
test('should reject missing domain', async () => {
|
||||
const res = await request(app)
|
||||
.get('/api/dns/resolve')
|
||||
.query({ token: 'test-token' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
|
||||
test('should reject invalid domain format', async () => {
|
||||
const res = await request(app)
|
||||
.get('/api/dns/resolve')
|
||||
.query({ domain: '!!!bad!!!', token: 'test-token' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,438 +0,0 @@
|
||||
/**
|
||||
* Docker Security Module Tests
|
||||
* Tests for image digest verification, security modes, and trusted digest management
|
||||
*
|
||||
* Note: These tests focus on the security logic and configuration management.
|
||||
* Docker API integration tests are handled separately.
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
// We'll test the module with a temp config file to avoid affecting production
|
||||
const TEST_CONFIG_PATH = path.join(__dirname, 'test-docker-security-config.json');
|
||||
|
||||
describe('DockerSecurity Module', () => {
|
||||
let dockerSecurity;
|
||||
const originalEnv = process.env.DOCKER_SECURITY_CONFIG;
|
||||
|
||||
beforeAll(() => {
|
||||
// Point to test config file
|
||||
process.env.DOCKER_SECURITY_CONFIG = TEST_CONFIG_PATH;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
// Clean up test config before each test
|
||||
if (fs.existsSync(TEST_CONFIG_PATH)) {
|
||||
fs.unlinkSync(TEST_CONFIG_PATH);
|
||||
}
|
||||
|
||||
// Clear module cache and reload
|
||||
delete require.cache[require.resolve('../docker-security')];
|
||||
dockerSecurity = require('../docker-security');
|
||||
|
||||
// Clear any existing trusted digests
|
||||
dockerSecurity.config.trustedDigests = {};
|
||||
dockerSecurity.config.verificationMode = 'verify';
|
||||
dockerSecurity.mode = 'verify';
|
||||
dockerSecurity.saveConfig();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Clean up test config
|
||||
if (fs.existsSync(TEST_CONFIG_PATH)) {
|
||||
fs.unlinkSync(TEST_CONFIG_PATH);
|
||||
}
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
// Restore original env
|
||||
if (originalEnv) {
|
||||
process.env.DOCKER_SECURITY_CONFIG = originalEnv;
|
||||
} else {
|
||||
delete process.env.DOCKER_SECURITY_CONFIG;
|
||||
}
|
||||
});
|
||||
|
||||
describe('Configuration Management', () => {
|
||||
test('should create default config when file does not exist', () => {
|
||||
expect(dockerSecurity.config).toBeDefined();
|
||||
expect(dockerSecurity.config).toHaveProperty('trustedDigests');
|
||||
expect(dockerSecurity.config).toHaveProperty('verificationMode');
|
||||
expect(dockerSecurity.config).toHaveProperty('allowUnverified');
|
||||
expect(dockerSecurity.config).toHaveProperty('updateTrustedOnPull');
|
||||
});
|
||||
|
||||
test('should save and load config correctly', () => {
|
||||
// Add a trusted digest
|
||||
dockerSecurity.setTrustedDigest('test:v1', 'sha256:test123');
|
||||
|
||||
// Reload module
|
||||
delete require.cache[require.resolve('../docker-security')];
|
||||
const reloaded = require('../docker-security');
|
||||
|
||||
// Should have loaded the saved config
|
||||
expect(reloaded.config.trustedDigests['test:v1']).toBe('sha256:test123');
|
||||
});
|
||||
|
||||
test('should handle corrupted config file gracefully', () => {
|
||||
// Write corrupted JSON
|
||||
fs.writeFileSync(TEST_CONFIG_PATH, '{ invalid json');
|
||||
|
||||
// Reload - should not crash
|
||||
delete require.cache[require.resolve('../docker-security')];
|
||||
const fresh = require('../docker-security');
|
||||
|
||||
expect(fresh.config).toBeDefined();
|
||||
expect(fresh.config.trustedDigests).toBeDefined();
|
||||
});
|
||||
|
||||
test('should persist changes to disk', () => {
|
||||
dockerSecurity.setTrustedDigest('nginx:latest', 'sha256:abc123');
|
||||
|
||||
// Config file should exist
|
||||
expect(fs.existsSync(TEST_CONFIG_PATH)).toBe(true);
|
||||
|
||||
// Should be valid JSON
|
||||
const savedData = fs.readFileSync(TEST_CONFIG_PATH, 'utf8');
|
||||
const parsed = JSON.parse(savedData);
|
||||
expect(parsed.trustedDigests['nginx:latest']).toBe('sha256:abc123');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Trusted Digest Management', () => {
|
||||
test('should add new trusted digest', () => {
|
||||
dockerSecurity.setTrustedDigest('postgres:14', 'sha256:newdigest');
|
||||
|
||||
expect(dockerSecurity.config.trustedDigests['postgres:14']).toBe('sha256:newdigest');
|
||||
});
|
||||
|
||||
test('should update existing trusted digest', () => {
|
||||
dockerSecurity.setTrustedDigest('nginx:latest', 'sha256:original');
|
||||
dockerSecurity.setTrustedDigest('nginx:latest', 'sha256:updated');
|
||||
|
||||
expect(dockerSecurity.config.trustedDigests['nginx:latest']).toBe('sha256:updated');
|
||||
});
|
||||
|
||||
test('should remove trusted digest', () => {
|
||||
dockerSecurity.setTrustedDigest('nginx:latest', 'sha256:test');
|
||||
expect(dockerSecurity.config.trustedDigests['nginx:latest']).toBeDefined();
|
||||
|
||||
dockerSecurity.removeTrustedDigest('nginx:latest');
|
||||
expect(dockerSecurity.config.trustedDigests['nginx:latest']).toBeUndefined();
|
||||
});
|
||||
|
||||
test('should get all trusted digests', () => {
|
||||
dockerSecurity.setTrustedDigest('image1', 'sha256:digest1');
|
||||
dockerSecurity.setTrustedDigest('image2', 'sha256:digest2');
|
||||
|
||||
const digests = dockerSecurity.getTrustedDigests();
|
||||
|
||||
expect(digests).toHaveProperty('image1');
|
||||
expect(digests).toHaveProperty('image2');
|
||||
expect(Object.keys(digests).length).toBeGreaterThanOrEqual(2);
|
||||
});
|
||||
|
||||
test('should return copy of trusted digests (not reference)', () => {
|
||||
dockerSecurity.setTrustedDigest('original', 'sha256:original');
|
||||
|
||||
const digests = dockerSecurity.getTrustedDigests();
|
||||
digests['modified'] = 'sha256:modified';
|
||||
|
||||
expect(dockerSecurity.config.trustedDigests['modified']).toBeUndefined();
|
||||
expect(dockerSecurity.config.trustedDigests['original']).toBe('sha256:original');
|
||||
});
|
||||
|
||||
test('should handle image names with special characters', () => {
|
||||
const specialName = 'my-app_v2.0:latest';
|
||||
|
||||
dockerSecurity.setTrustedDigest(specialName, 'sha256:special');
|
||||
expect(dockerSecurity.config.trustedDigests[specialName]).toBe('sha256:special');
|
||||
});
|
||||
|
||||
test('should handle very long image names', () => {
|
||||
const longName = `registry.example.com/team/project/${ 'a'.repeat(100) }:v1.2.3`;
|
||||
|
||||
dockerSecurity.setTrustedDigest(longName, 'sha256:long');
|
||||
expect(dockerSecurity.config.trustedDigests[longName]).toBe('sha256:long');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Security Modes', () => {
|
||||
test('should set mode to strict', () => {
|
||||
dockerSecurity.setMode('strict');
|
||||
|
||||
expect(dockerSecurity.mode).toBe('strict');
|
||||
expect(dockerSecurity.config.verificationMode).toBe('strict');
|
||||
});
|
||||
|
||||
test('should set mode to verify', () => {
|
||||
dockerSecurity.setMode('verify');
|
||||
|
||||
expect(dockerSecurity.mode).toBe('verify');
|
||||
expect(dockerSecurity.config.verificationMode).toBe('verify');
|
||||
});
|
||||
|
||||
test('should set mode to permissive', () => {
|
||||
dockerSecurity.setMode('permissive');
|
||||
|
||||
expect(dockerSecurity.mode).toBe('permissive');
|
||||
expect(dockerSecurity.config.verificationMode).toBe('permissive');
|
||||
});
|
||||
|
||||
test('should reject invalid mode', () => {
|
||||
expect(() => dockerSecurity.setMode('invalid')).toThrow('Invalid mode');
|
||||
expect(() => dockerSecurity.setMode('STRICT')).toThrow('Invalid mode');
|
||||
expect(() => dockerSecurity.setMode('')).toThrow('Invalid mode');
|
||||
});
|
||||
|
||||
test('should persist mode change to config file', () => {
|
||||
dockerSecurity.setMode('strict');
|
||||
|
||||
const savedData = fs.readFileSync(TEST_CONFIG_PATH, 'utf8');
|
||||
const parsed = JSON.parse(savedData);
|
||||
expect(parsed.verificationMode).toBe('strict');
|
||||
});
|
||||
|
||||
test('should load mode from config on startup', () => {
|
||||
dockerSecurity.setMode('strict');
|
||||
|
||||
// Reload module
|
||||
delete require.cache[require.resolve('../docker-security')];
|
||||
const reloaded = require('../docker-security');
|
||||
|
||||
expect(reloaded.mode).toBe('strict');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Digest Verification Logic', () => {
|
||||
test('should accept matching digest', async () => {
|
||||
dockerSecurity.setTrustedDigest('nginx:latest', 'sha256:trusted123');
|
||||
|
||||
const result = await dockerSecurity.verifyImageDigest('nginx:latest', 'sha256:trusted123');
|
||||
|
||||
expect(result.verified).toBe(true);
|
||||
expect(result.action).toBe('accept');
|
||||
expect(result.reason).toContain('matches trusted value');
|
||||
});
|
||||
|
||||
test('should reject mismatched digest in strict mode', async () => {
|
||||
dockerSecurity.setMode('strict');
|
||||
dockerSecurity.setTrustedDigest('nginx:latest', 'sha256:trusted123');
|
||||
|
||||
const result = await dockerSecurity.verifyImageDigest('nginx:latest', 'sha256:different');
|
||||
|
||||
expect(result.verified).toBe(false);
|
||||
expect(result.action).toBe('reject');
|
||||
expect(result.reason).toContain('mismatch');
|
||||
});
|
||||
|
||||
test('should warn on mismatched digest in verify mode', async () => {
|
||||
dockerSecurity.setMode('verify');
|
||||
dockerSecurity.setTrustedDigest('nginx:latest', 'sha256:trusted123');
|
||||
|
||||
const result = await dockerSecurity.verifyImageDigest('nginx:latest', 'sha256:different');
|
||||
|
||||
expect(result.verified).toBe(false);
|
||||
expect(result.action).toBe('warn');
|
||||
expect(result.reason).toContain('mismatch');
|
||||
});
|
||||
|
||||
test('should accept mismatched digest in permissive mode', async () => {
|
||||
dockerSecurity.setMode('permissive');
|
||||
dockerSecurity.setTrustedDigest('nginx:latest', 'sha256:trusted123');
|
||||
|
||||
const result = await dockerSecurity.verifyImageDigest('nginx:latest', 'sha256:different');
|
||||
|
||||
expect(result.verified).toBe(true);
|
||||
expect(result.action).toBe('accept');
|
||||
expect(result.reason).toContain('permissive mode');
|
||||
});
|
||||
|
||||
test('should reject unknown image in strict mode', async () => {
|
||||
dockerSecurity.setMode('strict');
|
||||
|
||||
const result = await dockerSecurity.verifyImageDigest('unknown:latest', 'sha256:anything');
|
||||
|
||||
expect(result.verified).toBe(false);
|
||||
expect(result.action).toBe('reject');
|
||||
expect(result.reason).toContain('No trusted digest');
|
||||
});
|
||||
|
||||
test('should accept unknown image in verify mode', async () => {
|
||||
dockerSecurity.setMode('verify');
|
||||
|
||||
const result = await dockerSecurity.verifyImageDigest('unknown:latest', 'sha256:anything');
|
||||
|
||||
expect(result.verified).toBe(true);
|
||||
expect(result.action).toBe('accept');
|
||||
});
|
||||
|
||||
test('should accept and auto-trust unknown image when updateTrustedOnPull enabled', async () => {
|
||||
dockerSecurity.setMode('permissive');
|
||||
dockerSecurity.config.updateTrustedOnPull = true;
|
||||
|
||||
const result = await dockerSecurity.verifyImageDigest('newimage:v1', 'sha256:new123');
|
||||
|
||||
expect(result.verified).toBe(true);
|
||||
expect(result.action).toBe('accept');
|
||||
expect(dockerSecurity.config.trustedDigests['newimage:v1']).toBe('sha256:new123');
|
||||
});
|
||||
|
||||
test('should not auto-trust when updateTrustedOnPull disabled', async () => {
|
||||
dockerSecurity.config.updateTrustedOnPull = false;
|
||||
|
||||
const result = await dockerSecurity.verifyImageDigest('newimage:v2', 'sha256:new456');
|
||||
|
||||
expect(result.verified).toBe(true);
|
||||
expect(dockerSecurity.config.trustedDigests['newimage:v2']).toBeUndefined();
|
||||
});
|
||||
|
||||
test('should match base image name when tag not in config', async () => {
|
||||
// Config has 'redis' (no tag), test 'redis:alpine'
|
||||
dockerSecurity.setTrustedDigest('redis', 'sha256:base');
|
||||
|
||||
const result = await dockerSecurity.verifyImageDigest('redis:alpine', 'sha256:base');
|
||||
|
||||
expect(result.verified).toBe(true);
|
||||
expect(result.action).toBe('accept');
|
||||
});
|
||||
|
||||
test('should prefer specific tag over base name', async () => {
|
||||
dockerSecurity.setTrustedDigest('redis', 'sha256:base');
|
||||
dockerSecurity.setTrustedDigest('redis:alpine', 'sha256:alpine');
|
||||
|
||||
const result = await dockerSecurity.verifyImageDigest('redis:alpine', 'sha256:alpine');
|
||||
|
||||
expect(result.verified).toBe(true);
|
||||
expect(result.trustedDigest).toBe('sha256:alpine');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Status Reporting', () => {
|
||||
test('should return security status', () => {
|
||||
const status = dockerSecurity.getStatus();
|
||||
|
||||
expect(status).toHaveProperty('mode');
|
||||
expect(status).toHaveProperty('trustedImagesCount');
|
||||
expect(status).toHaveProperty('configFile');
|
||||
expect(status).toHaveProperty('updateTrustedOnPull');
|
||||
});
|
||||
|
||||
test('should count trusted images correctly', () => {
|
||||
const initialCount = dockerSecurity.getStatus().trustedImagesCount;
|
||||
|
||||
dockerSecurity.setTrustedDigest('count-test-1', 'sha256:1');
|
||||
dockerSecurity.setTrustedDigest('count-test-2', 'sha256:2');
|
||||
dockerSecurity.setTrustedDigest('count-test-3', 'sha256:3');
|
||||
|
||||
const status = dockerSecurity.getStatus();
|
||||
|
||||
expect(status.trustedImagesCount).toBe(initialCount + 3);
|
||||
});
|
||||
|
||||
test('should reflect current mode', () => {
|
||||
dockerSecurity.setMode('strict');
|
||||
expect(dockerSecurity.getStatus().mode).toBe('strict');
|
||||
|
||||
dockerSecurity.setMode('verify');
|
||||
expect(dockerSecurity.getStatus().mode).toBe('verify');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases & Security Boundaries', () => {
|
||||
test('should handle empty digest string in strict mode', async () => {
|
||||
dockerSecurity.setMode('strict');
|
||||
dockerSecurity.setTrustedDigest('test:latest', 'sha256:trusted');
|
||||
|
||||
const result = await dockerSecurity.verifyImageDigest('test:latest', '');
|
||||
|
||||
expect(result.verified).toBe(false);
|
||||
expect(result.action).toBe('reject');
|
||||
});
|
||||
|
||||
test('should handle null/undefined digest in strict mode', async () => {
|
||||
dockerSecurity.setMode('strict');
|
||||
dockerSecurity.setTrustedDigest('test:latest', 'sha256:trusted');
|
||||
|
||||
const result1 = await dockerSecurity.verifyImageDigest('test:latest', null);
|
||||
const result2 = await dockerSecurity.verifyImageDigest('test:latest', undefined);
|
||||
|
||||
expect(result1.verified).toBe(false);
|
||||
expect(result2.verified).toBe(false);
|
||||
});
|
||||
|
||||
test('should not expose sensitive data in verification result', async () => {
|
||||
dockerSecurity.setTrustedDigest('nginx:latest', 'sha256:trusted123');
|
||||
|
||||
const result = await dockerSecurity.verifyImageDigest('nginx:latest', 'sha256:trusted123');
|
||||
|
||||
// Should not contain file paths, internal state, etc
|
||||
const resultJson = JSON.stringify(result);
|
||||
expect(resultJson).not.toContain(TEST_CONFIG_PATH);
|
||||
expect(result).not.toHaveProperty('config');
|
||||
expect(result).not.toHaveProperty('_internal');
|
||||
});
|
||||
|
||||
test('should handle concurrent digest updates safely', () => {
|
||||
// Add multiple digests rapidly
|
||||
for (let i = 0; i < 10; i++) {
|
||||
dockerSecurity.setTrustedDigest(`image${i}`, `sha256:digest${i}`);
|
||||
}
|
||||
|
||||
// All should be present
|
||||
for (let i = 0; i < 10; i++) {
|
||||
expect(dockerSecurity.config.trustedDigests[`image${i}`]).toBe(`sha256:digest${i}`);
|
||||
}
|
||||
});
|
||||
|
||||
test('should handle removal of non-existent digest', () => {
|
||||
expect(() => {
|
||||
dockerSecurity.removeTrustedDigest('nonexistent:latest');
|
||||
}).not.toThrow();
|
||||
});
|
||||
|
||||
test('should validate digest format (basic)', async () => {
|
||||
dockerSecurity.setMode('verify');
|
||||
|
||||
// These should all work (verification logic doesn't enforce sha256: prefix)
|
||||
const result1 = await dockerSecurity.verifyImageDigest('test:1', 'sha256:abc123');
|
||||
const result2 = await dockerSecurity.verifyImageDigest('test:2', 'localid123');
|
||||
|
||||
expect(result1.actualDigest).toBe('sha256:abc123');
|
||||
expect(result2.actualDigest).toBe('localid123');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Verification Result Structure', () => {
|
||||
test('should include all expected fields in result', async () => {
|
||||
dockerSecurity.setTrustedDigest('test:v1', 'sha256:trusted');
|
||||
|
||||
const result = await dockerSecurity.verifyImageDigest('test:v1', 'sha256:trusted');
|
||||
|
||||
expect(result).toHaveProperty('verified');
|
||||
expect(result).toHaveProperty('mode');
|
||||
expect(result).toHaveProperty('imageName');
|
||||
expect(result).toHaveProperty('actualDigest');
|
||||
expect(result).toHaveProperty('trustedDigest');
|
||||
expect(result).toHaveProperty('action');
|
||||
expect(result).toHaveProperty('reason');
|
||||
});
|
||||
|
||||
test('should set trustedDigest to null when not configured', async () => {
|
||||
const result = await dockerSecurity.verifyImageDigest('unknown:v1', 'sha256:test');
|
||||
|
||||
expect(result.trustedDigest).toBeNull();
|
||||
});
|
||||
|
||||
test('should preserve imageName and actualDigest in result', async () => {
|
||||
const imageName = 'myapp:1.2.3';
|
||||
const digest = 'sha256:abcdef123456';
|
||||
|
||||
const result = await dockerSecurity.verifyImageDigest(imageName, digest);
|
||||
|
||||
expect(result.imageName).toBe(imageName);
|
||||
expect(result.actualDigest).toBe(digest);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,604 +0,0 @@
|
||||
/**
|
||||
* Edge Case Tests
|
||||
*
|
||||
* Tests boundary conditions, invalid inputs, and extreme scenarios
|
||||
* Validates system behavior under unusual or stressful conditions
|
||||
*/
|
||||
|
||||
const request = require('supertest');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
// Create test instance with isolated environment
|
||||
const testServicesFile = path.join(os.tmpdir(), `edge-services-${Date.now()}.json`);
|
||||
const testConfigFile = path.join(os.tmpdir(), `edge-config-${Date.now()}.json`);
|
||||
|
||||
// Set test environment
|
||||
process.env.SERVICES_FILE = testServicesFile;
|
||||
process.env.CONFIG_FILE = testConfigFile;
|
||||
process.env.ENABLE_HEALTH_CHECKER = 'false';
|
||||
process.env.NODE_ENV = 'test';
|
||||
|
||||
// Initialize test files
|
||||
fs.writeFileSync(testServicesFile, '[]', 'utf8');
|
||||
fs.writeFileSync(testConfigFile, '{}', 'utf8');
|
||||
|
||||
// Require app after environment setup
|
||||
const app = require('../server');
|
||||
|
||||
describe('Edge Case Tests', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
// Reset state through the API to respect file locks
|
||||
await request(app).put('/api/services').send([]);
|
||||
fs.writeFileSync(testConfigFile, '{}', 'utf8');
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
// Cleanup test files
|
||||
try {
|
||||
fs.unlinkSync(testServicesFile);
|
||||
fs.unlinkSync(testConfigFile);
|
||||
} catch (e) {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
});
|
||||
|
||||
describe('Boundary Conditions', () => {
|
||||
test('should handle empty service ID', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: '', name: 'Empty ID Service' });
|
||||
|
||||
// Should reject empty ID
|
||||
expect(res.statusCode).toBeGreaterThanOrEqual(400);
|
||||
});
|
||||
|
||||
test('should handle very long service ID (1000 chars)', async () => {
|
||||
const longId = 'a'.repeat(1000);
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: longId, name: 'Long ID' });
|
||||
|
||||
// Might accept or reject depending on validation
|
||||
expect([200, 400, 413]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle very long service name (10000 chars)', async () => {
|
||||
const longName = 'Name '.repeat(2000);
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'test', name: longName });
|
||||
|
||||
// Should handle gracefully
|
||||
expect([200, 400, 413]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle service with exactly 0 properties', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({});
|
||||
|
||||
// Should reject - missing required fields
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
|
||||
test('should handle service with 100+ properties', async () => {
|
||||
const service = { id: 'many-props', name: 'Many Props' };
|
||||
for (let i = 0; i < 100; i++) {
|
||||
service[`prop${i}`] = `value${i}`;
|
||||
}
|
||||
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send(service);
|
||||
|
||||
// Should handle extra properties gracefully
|
||||
expect([200, 400]).toContain(res.statusCode);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid Input Types', () => {
|
||||
test('should handle null service ID', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: null, name: 'Null ID' });
|
||||
|
||||
expect(res.statusCode).toBeGreaterThanOrEqual(400);
|
||||
});
|
||||
|
||||
test('should handle number as service ID', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 12345, name: 'Number ID' });
|
||||
|
||||
// Might convert to string or reject
|
||||
expect([200, 400]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle array as service ID', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: ['array', 'id'], name: 'Array ID' });
|
||||
|
||||
expect(res.statusCode).toBeGreaterThanOrEqual(400);
|
||||
});
|
||||
|
||||
test('should handle object as service ID', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: { nested: 'object' }, name: 'Object ID' });
|
||||
|
||||
expect(res.statusCode).toBeGreaterThanOrEqual(400);
|
||||
});
|
||||
|
||||
test('should handle boolean as service name', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'bool-test', name: true });
|
||||
|
||||
// Might convert to string or reject
|
||||
expect([200, 400]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle undefined properties', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'test', name: undefined });
|
||||
|
||||
expect(res.statusCode).toBeGreaterThanOrEqual(400);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Special Characters and Encoding', () => {
|
||||
test('should handle Unicode characters in service name', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'unicode', name: '🚀 Rocket Service 中文 العربية' });
|
||||
|
||||
// Should handle Unicode properly
|
||||
expect([200, 400]).toContain(res.statusCode);
|
||||
|
||||
if (res.statusCode === 200) {
|
||||
const services = JSON.parse(fs.readFileSync(testServicesFile, 'utf8'));
|
||||
expect(services[0].name).toContain('🚀');
|
||||
}
|
||||
});
|
||||
|
||||
test('should handle special characters in ID', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'test!@#$%^&*()', name: 'Special ID' });
|
||||
|
||||
// Might sanitize or reject
|
||||
expect([200, 400]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle newlines in service name', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'newline', name: 'Line 1\nLine 2\nLine 3' });
|
||||
|
||||
expect([200, 400]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle SQL injection attempt in ID', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: "'; DROP TABLE services; --", name: 'SQL Injection' });
|
||||
|
||||
// Should reject or sanitize
|
||||
expect([200, 400]).toContain(res.statusCode);
|
||||
|
||||
// Verify file is still valid JSON
|
||||
const content = fs.readFileSync(testServicesFile, 'utf8');
|
||||
expect(() => JSON.parse(content)).not.toThrow();
|
||||
});
|
||||
|
||||
test('should handle path traversal attempt in logo', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({
|
||||
id: 'path-traversal',
|
||||
name: 'Path Traversal',
|
||||
logo: '../../../../../../etc/passwd',
|
||||
});
|
||||
|
||||
// Should handle safely
|
||||
expect([200, 400]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle null bytes in input', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'null\x00byte', name: 'Test\x00Name' });
|
||||
|
||||
// Should reject or sanitize
|
||||
expect([200, 400]).toContain(res.statusCode);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Large Datasets', () => {
|
||||
test('should handle 100 services', async () => {
|
||||
// Add 100 services
|
||||
for (let i = 0; i < 100; i++) {
|
||||
await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: `service-${i}`, name: `Service ${i}` });
|
||||
}
|
||||
|
||||
// Verify all exist
|
||||
const res = await request(app).get('/api/services');
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.length).toBe(100);
|
||||
}, 60000);
|
||||
|
||||
test('should handle deleting from large dataset', async () => {
|
||||
// Add 50 services
|
||||
for (let i = 0; i < 50; i++) {
|
||||
await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: `bulk-${i}`, name: `Bulk ${i}` });
|
||||
}
|
||||
|
||||
// Delete 25 services
|
||||
for (let i = 0; i < 25; i++) {
|
||||
await request(app).delete(`/api/services/bulk-${i}`);
|
||||
}
|
||||
|
||||
// Verify 25 remain
|
||||
const res = await request(app).get('/api/services');
|
||||
expect(res.body.length).toBe(25);
|
||||
}, 30000);
|
||||
|
||||
test('should handle bulk import of 200 services', async () => {
|
||||
const bulkServices = Array.from({ length: 200 }, (_, i) => ({
|
||||
id: `bulk-${i}`,
|
||||
name: `Bulk Service ${i}`,
|
||||
}));
|
||||
|
||||
const res = await request(app)
|
||||
.put('/api/services')
|
||||
.send(bulkServices);
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
|
||||
// Verify all imported
|
||||
const getRes = await request(app).get('/api/services');
|
||||
expect(getRes.body.length).toBe(200);
|
||||
}, 10000); // Longer timeout
|
||||
|
||||
test('should handle service with very large property value (1MB)', async () => {
|
||||
const largeData = 'x'.repeat(1024 * 1024); // 1MB string
|
||||
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({
|
||||
id: 'large-data',
|
||||
name: 'Large Data',
|
||||
description: largeData,
|
||||
});
|
||||
|
||||
// Might reject due to size
|
||||
expect([200, 413]).toContain(res.statusCode);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Concurrent Operations and Race Conditions', () => {
|
||||
test('should handle 20 concurrent POSTs without corruption', async () => {
|
||||
const promises = Array.from({ length: 20 }, (_, i) =>
|
||||
request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: `concurrent-${i}`, name: `Concurrent ${i}` }),
|
||||
);
|
||||
|
||||
const results = await Promise.all(promises);
|
||||
|
||||
// With file locking, some may fail with 500 (lock contention) — that's expected
|
||||
const successes = results.filter(r => r.statusCode === 200);
|
||||
expect(successes.length).toBeGreaterThanOrEqual(1);
|
||||
|
||||
// The critical check: file must be valid JSON (no corruption)
|
||||
const content = fs.readFileSync(testServicesFile, 'utf8');
|
||||
expect(() => JSON.parse(content)).not.toThrow();
|
||||
|
||||
// And the count must match the number of successes
|
||||
const services = JSON.parse(content);
|
||||
expect(services.length).toBe(successes.length);
|
||||
});
|
||||
|
||||
test('should handle concurrent add and delete of same service', async () => {
|
||||
// Add a service
|
||||
await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'race', name: 'Race Service' });
|
||||
|
||||
// Simultaneously add again and delete
|
||||
const [addRes, deleteRes] = await Promise.all([
|
||||
request(app).post('/api/services').send({ id: 'race', name: 'Race 2' }),
|
||||
request(app).delete('/api/services/race'),
|
||||
]);
|
||||
|
||||
// One should succeed, states should be consistent
|
||||
const services = JSON.parse(fs.readFileSync(testServicesFile, 'utf8'));
|
||||
expect(() => JSON.parse(fs.readFileSync(testServicesFile, 'utf8'))).not.toThrow();
|
||||
});
|
||||
|
||||
test('should handle concurrent bulk imports', async () => {
|
||||
const set1 = [{ id: 's1', name: 'Set 1' }];
|
||||
const set2 = [{ id: 's2', name: 'Set 2' }];
|
||||
|
||||
const [res1, res2] = await Promise.all([
|
||||
request(app).put('/api/services').send(set1),
|
||||
request(app).put('/api/services').send(set2),
|
||||
]);
|
||||
|
||||
// Both operations should complete
|
||||
expect([200]).toContain(res1.statusCode);
|
||||
expect([200]).toContain(res2.statusCode);
|
||||
|
||||
// Final state should have one complete set (last write wins)
|
||||
const services = JSON.parse(fs.readFileSync(testServicesFile, 'utf8'));
|
||||
expect(services.length).toBeGreaterThanOrEqual(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('File System Edge Cases', () => {
|
||||
test('should handle file with read-only after writing', async () => {
|
||||
// Add a service
|
||||
await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'readonly-test', name: 'Read Only' });
|
||||
|
||||
// Make file read-only
|
||||
fs.chmodSync(testServicesFile, 0o444);
|
||||
|
||||
// Try to add another service
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'should-fail', name: 'Should Fail' });
|
||||
|
||||
// Should fail with 500 error
|
||||
expect(res.statusCode).toBe(500);
|
||||
|
||||
// Restore permissions for cleanup
|
||||
fs.chmodSync(testServicesFile, 0o666);
|
||||
});
|
||||
|
||||
test('should handle missing services file gracefully', async () => {
|
||||
// Delete the file
|
||||
fs.unlinkSync(testServicesFile);
|
||||
|
||||
// Try to get services
|
||||
const res = await request(app).get('/api/services');
|
||||
|
||||
// Should either return empty array or create file
|
||||
expect([200, 500]).toContain(res.statusCode);
|
||||
|
||||
// File should be recreated or error handled
|
||||
if (res.statusCode === 200) {
|
||||
expect(Array.isArray(res.body)).toBe(true);
|
||||
}
|
||||
});
|
||||
|
||||
test('should handle empty file (0 bytes)', async () => {
|
||||
// Create empty file
|
||||
fs.writeFileSync(testServicesFile, '', 'utf8');
|
||||
|
||||
const res = await request(app).get('/api/services');
|
||||
|
||||
// Should handle gracefully
|
||||
expect([200, 500]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle file with only whitespace', async () => {
|
||||
fs.writeFileSync(testServicesFile, ' \n\t\r ', 'utf8');
|
||||
|
||||
const res = await request(app).get('/api/services');
|
||||
|
||||
// Should handle gracefully
|
||||
expect([200, 500]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle file with BOM (Byte Order Mark)', async () => {
|
||||
const bomContent = '\uFEFF[]';
|
||||
fs.writeFileSync(testServicesFile, bomContent, 'utf8');
|
||||
|
||||
const res = await request(app).get('/api/services');
|
||||
|
||||
// BOM may cause JSON parse to fail (500) or be handled (200)
|
||||
expect([200, 500]).toContain(res.statusCode);
|
||||
if (res.statusCode === 200) {
|
||||
expect(Array.isArray(res.body)).toBe(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('API Request Edge Cases', () => {
|
||||
test('should handle missing Content-Type header', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.set('Content-Type', '')
|
||||
.send('{"id":"test","name":"Test"}');
|
||||
|
||||
// Should handle gracefully
|
||||
expect([200, 400]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle wrong Content-Type (text/plain)', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.set('Content-Type', 'text/plain')
|
||||
.send('{"id":"test","name":"Test"}');
|
||||
|
||||
// Might still parse or reject
|
||||
expect([200, 400, 415]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle extremely nested JSON (50 levels)', async () => {
|
||||
let nested = { value: 'deep' };
|
||||
for (let i = 0; i < 50; i++) {
|
||||
nested = { level: nested };
|
||||
}
|
||||
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'nested', name: 'Nested', data: nested });
|
||||
|
||||
// Should handle or reject
|
||||
expect([200, 400]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle request with circular reference (if possible)', async () => {
|
||||
// Can't send actual circular JSON, but test large nested structure
|
||||
const data = { id: 'circular', name: 'Test' };
|
||||
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send(data);
|
||||
|
||||
expect([200, 400]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle double-encoded JSON', async () => {
|
||||
const doubleEncoded = JSON.stringify(
|
||||
JSON.stringify({ id: 'double', name: 'Double Encoded' }),
|
||||
);
|
||||
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.set('Content-Type', 'application/json')
|
||||
.send(doubleEncoded);
|
||||
|
||||
// Should reject - wrong format
|
||||
expect([400, 500]).toContain(res.statusCode);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Template Edge Cases', () => {
|
||||
test('should handle requesting template with special chars in ID', async () => {
|
||||
const res = await request(app).get('/api/apps/templates/test%20space');
|
||||
|
||||
expect([404, 400]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle requesting template with very long ID', async () => {
|
||||
const longId = 'a'.repeat(1000);
|
||||
const res = await request(app).get(`/api/apps/templates/${longId}`);
|
||||
|
||||
expect([404, 414]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle template with path traversal', async () => {
|
||||
const res = await request(app).get('/api/apps/templates/../../secrets');
|
||||
|
||||
expect([404, 400]).toContain(res.statusCode);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Configuration Edge Cases', () => {
|
||||
test('should handle empty configuration object', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/config')
|
||||
.send({});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
|
||||
// Verify empty config saved
|
||||
const config = JSON.parse(fs.readFileSync(testConfigFile, 'utf8'));
|
||||
expect(typeof config).toBe('object');
|
||||
});
|
||||
|
||||
test('should handle configuration with 1000 properties', async () => {
|
||||
const largeConfig = {};
|
||||
for (let i = 0; i < 1000; i++) {
|
||||
largeConfig[`setting${i}`] = `value${i}`;
|
||||
}
|
||||
|
||||
const res = await request(app)
|
||||
.post('/api/config')
|
||||
.send(largeConfig);
|
||||
|
||||
expect([200, 413]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle configuration with nested arrays', async () => {
|
||||
const config = {
|
||||
nested: [[['deep', 'array'], ['values']], [['more']]],
|
||||
};
|
||||
|
||||
const res = await request(app)
|
||||
.post('/api/config')
|
||||
.send(config);
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Delete Edge Cases', () => {
|
||||
test('should handle deleting non-existent service', async () => {
|
||||
const res = await request(app).delete('/api/services/does-not-exist');
|
||||
|
||||
expect(res.statusCode).toBe(404);
|
||||
});
|
||||
|
||||
test('should handle deleting with special characters in ID', async () => {
|
||||
const res = await request(app).delete('/api/services/test%2Fslash');
|
||||
|
||||
expect([404, 400]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle deleting same service twice simultaneously', async () => {
|
||||
// Add service
|
||||
await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'delete-me', name: 'Delete Me' });
|
||||
|
||||
// Delete twice at once
|
||||
const [res1, res2] = await Promise.all([
|
||||
request(app).delete('/api/services/delete-me'),
|
||||
request(app).delete('/api/services/delete-me'),
|
||||
]);
|
||||
|
||||
// One should succeed (200), one should fail (404)
|
||||
const statuses = [res1.statusCode, res2.statusCode].sort();
|
||||
expect(statuses).toContain(200);
|
||||
expect(statuses).toContain(404);
|
||||
});
|
||||
});
|
||||
|
||||
describe('State Consistency Edge Cases', () => {
|
||||
test('should recover if file becomes corrupted mid-operation', async () => {
|
||||
// Add initial service
|
||||
await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'initial', name: 'Initial' });
|
||||
|
||||
// Corrupt the file
|
||||
fs.writeFileSync(testServicesFile, '{corrupted', 'utf8');
|
||||
|
||||
// Try to read
|
||||
const res = await request(app).get('/api/services');
|
||||
|
||||
// Should handle error gracefully
|
||||
expect([200, 500]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should handle file replaced with directory', async () => {
|
||||
// Delete file
|
||||
fs.unlinkSync(testServicesFile);
|
||||
|
||||
// Create directory with same name
|
||||
fs.mkdirSync(testServicesFile);
|
||||
|
||||
// Try to read
|
||||
const res = await request(app).get('/api/services');
|
||||
|
||||
expect(res.statusCode).toBe(500);
|
||||
|
||||
// Cleanup
|
||||
fs.rmdirSync(testServicesFile);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,70 +0,0 @@
|
||||
/**
|
||||
* Error Log and Audit Log Route Tests
|
||||
*
|
||||
* Tests error log retrieval/clearing and audit log retrieval/clearing
|
||||
*/
|
||||
|
||||
const request = require('supertest');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
const testServicesFile = path.join(os.tmpdir(), `errorlogs-services-${Date.now()}.json`);
|
||||
const testConfigFile = path.join(os.tmpdir(), `errorlogs-config-${Date.now()}.json`);
|
||||
|
||||
process.env.SERVICES_FILE = testServicesFile;
|
||||
process.env.CONFIG_FILE = testConfigFile;
|
||||
process.env.ENABLE_HEALTH_CHECKER = 'false';
|
||||
process.env.NODE_ENV = 'test';
|
||||
|
||||
fs.writeFileSync(testServicesFile, '[]', 'utf8');
|
||||
fs.writeFileSync(testConfigFile, '{}', 'utf8');
|
||||
|
||||
const app = require('../server');
|
||||
|
||||
describe('Error Log and Audit Log Routes', () => {
|
||||
afterAll(() => {
|
||||
try { fs.unlinkSync(testServicesFile); } catch (e) { /* ignore */ }
|
||||
try { fs.unlinkSync(testConfigFile); } catch (e) { /* ignore */ }
|
||||
});
|
||||
|
||||
describe('GET /api/error-logs', () => {
|
||||
test('should return 200 with logs array', async () => {
|
||||
const res = await request(app).get('/api/error-logs');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(Array.isArray(res.body.logs)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/audit-logs', () => {
|
||||
test('should return 200 with entries array', async () => {
|
||||
const res = await request(app).get('/api/audit-logs');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(Array.isArray(res.body.entries)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('DELETE /api/error-logs', () => {
|
||||
test('should return 200 with success message', async () => {
|
||||
const res = await request(app).delete('/api/error-logs');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body).toHaveProperty('message');
|
||||
});
|
||||
});
|
||||
|
||||
describe('DELETE /api/audit-logs', () => {
|
||||
test('should return 200 with success message', async () => {
|
||||
const res = await request(app).delete('/api/audit-logs');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body).toHaveProperty('message');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,361 +0,0 @@
|
||||
// health-checker.js exports a singleton that reads config/history from disk on construction.
|
||||
// The jest.setup.js suppresses console and the files don't exist in test env, so it falls back to defaults.
|
||||
|
||||
const healthChecker = require('../health-checker');
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset singleton state between tests
|
||||
healthChecker.currentStatus = new Map();
|
||||
healthChecker.incidents = [];
|
||||
healthChecker.history = {};
|
||||
healthChecker.config = { services: {} };
|
||||
healthChecker.checking = false;
|
||||
if (healthChecker.checkInterval) {
|
||||
clearInterval(healthChecker.checkInterval);
|
||||
healthChecker.checkInterval = null;
|
||||
}
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
healthChecker.stop();
|
||||
});
|
||||
|
||||
describe('evaluateHealth', () => {
|
||||
test('returns true for status code in expectedStatusCodes', () => {
|
||||
expect(healthChecker.evaluateHealth(200, '', { expectedStatusCodes: [200, 201] })).toBe(true);
|
||||
});
|
||||
|
||||
test('returns false for status code not in expectedStatusCodes', () => {
|
||||
expect(healthChecker.evaluateHealth(500, '', { expectedStatusCodes: [200] })).toBe(false);
|
||||
});
|
||||
|
||||
test('uses default expected codes when not configured', () => {
|
||||
expect(healthChecker.evaluateHealth(200, '', {})).toBe(true);
|
||||
expect(healthChecker.evaluateHealth(301, '', {})).toBe(true);
|
||||
expect(healthChecker.evaluateHealth(500, '', {})).toBe(false);
|
||||
});
|
||||
|
||||
test('returns false when expectedBodyPattern regex does not match', () => {
|
||||
expect(healthChecker.evaluateHealth(200, 'error occurred', {
|
||||
expectedBodyPattern: 'ok|healthy',
|
||||
})).toBe(false);
|
||||
});
|
||||
|
||||
test('returns true when expectedBodyPattern regex matches', () => {
|
||||
expect(healthChecker.evaluateHealth(200, 'status: healthy', {
|
||||
expectedBodyPattern: 'healthy',
|
||||
})).toBe(true);
|
||||
});
|
||||
|
||||
test('returns false when expectedBodyContains text is missing', () => {
|
||||
expect(healthChecker.evaluateHealth(200, 'some response', {
|
||||
expectedBodyContains: 'healthy',
|
||||
})).toBe(false);
|
||||
});
|
||||
|
||||
test('returns true when expectedBodyContains text is present', () => {
|
||||
expect(healthChecker.evaluateHealth(200, 'service is healthy', {
|
||||
expectedBodyContains: 'healthy',
|
||||
})).toBe(true);
|
||||
});
|
||||
|
||||
test('checks all conditions: status code AND body pattern AND body contains', () => {
|
||||
// All pass
|
||||
expect(healthChecker.evaluateHealth(200, 'healthy ok', {
|
||||
expectedStatusCodes: [200],
|
||||
expectedBodyPattern: 'healthy',
|
||||
expectedBodyContains: 'ok',
|
||||
})).toBe(true);
|
||||
|
||||
// Status fails
|
||||
expect(healthChecker.evaluateHealth(500, 'healthy ok', {
|
||||
expectedStatusCodes: [200],
|
||||
expectedBodyPattern: 'healthy',
|
||||
expectedBodyContains: 'ok',
|
||||
})).toBe(false);
|
||||
|
||||
// Body pattern fails
|
||||
expect(healthChecker.evaluateHealth(200, 'error', {
|
||||
expectedStatusCodes: [200],
|
||||
expectedBodyPattern: 'healthy',
|
||||
expectedBodyContains: 'error',
|
||||
})).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('calculateSeverity', () => {
|
||||
test('returns critical for outage', () => {
|
||||
expect(healthChecker.calculateSeverity('outage')).toBe('critical');
|
||||
});
|
||||
|
||||
test('returns high for sla-violation', () => {
|
||||
expect(healthChecker.calculateSeverity('sla-violation')).toBe('high');
|
||||
});
|
||||
|
||||
test('returns medium for slow-response', () => {
|
||||
expect(healthChecker.calculateSeverity('slow-response')).toBe('medium');
|
||||
});
|
||||
|
||||
test('returns low for unknown type', () => {
|
||||
expect(healthChecker.calculateSeverity('unknown')).toBe('low');
|
||||
});
|
||||
});
|
||||
|
||||
describe('calculateUptime', () => {
|
||||
test('returns 100 when no history', () => {
|
||||
expect(healthChecker.calculateUptime('svc1')).toBe(100);
|
||||
});
|
||||
|
||||
test('returns 100 when all checks are up', () => {
|
||||
const now = new Date().toISOString();
|
||||
healthChecker.history['svc1'] = [
|
||||
{ status: 'up', timestamp: now },
|
||||
{ status: 'up', timestamp: now },
|
||||
{ status: 'up', timestamp: now },
|
||||
];
|
||||
expect(healthChecker.calculateUptime('svc1')).toBe(100);
|
||||
});
|
||||
|
||||
test('returns 0 when all checks are down', () => {
|
||||
const now = new Date().toISOString();
|
||||
healthChecker.history['svc1'] = [
|
||||
{ status: 'down', timestamp: now },
|
||||
{ status: 'down', timestamp: now },
|
||||
];
|
||||
expect(healthChecker.calculateUptime('svc1')).toBe(0);
|
||||
});
|
||||
|
||||
test('returns 50 when half are up', () => {
|
||||
const now = new Date().toISOString();
|
||||
healthChecker.history['svc1'] = [
|
||||
{ status: 'up', timestamp: now },
|
||||
{ status: 'down', timestamp: now },
|
||||
];
|
||||
expect(healthChecker.calculateUptime('svc1')).toBe(50);
|
||||
});
|
||||
});
|
||||
|
||||
describe('calculateAverageResponseTime', () => {
|
||||
test('returns 0 when no history', () => {
|
||||
expect(healthChecker.calculateAverageResponseTime('svc1')).toBe(0);
|
||||
});
|
||||
|
||||
test('calculates correct average', () => {
|
||||
const now = new Date().toISOString();
|
||||
healthChecker.history['svc1'] = [
|
||||
{ responseTime: 100, timestamp: now },
|
||||
{ responseTime: 200, timestamp: now },
|
||||
{ responseTime: 300, timestamp: now },
|
||||
];
|
||||
expect(healthChecker.calculateAverageResponseTime('svc1')).toBe(200);
|
||||
});
|
||||
});
|
||||
|
||||
describe('calculatePercentile', () => {
|
||||
test('returns p95 correctly', () => {
|
||||
const values = Array.from({ length: 100 }, (_, i) => i + 1);
|
||||
expect(healthChecker.calculatePercentile(values, 95)).toBe(95);
|
||||
});
|
||||
|
||||
test('returns p99 correctly', () => {
|
||||
const values = Array.from({ length: 100 }, (_, i) => i + 1);
|
||||
expect(healthChecker.calculatePercentile(values, 99)).toBe(99);
|
||||
});
|
||||
|
||||
test('returns 0 for empty array', () => {
|
||||
expect(healthChecker.calculatePercentile([], 95)).toBe(0);
|
||||
});
|
||||
|
||||
test('handles single-element array', () => {
|
||||
expect(healthChecker.calculatePercentile([42], 95)).toBe(42);
|
||||
});
|
||||
|
||||
test('sorts values before calculating', () => {
|
||||
const unsorted = [50, 10, 90, 30, 70, 20, 80, 40, 60, 100];
|
||||
expect(healthChecker.calculatePercentile(unsorted, 50)).toBe(50);
|
||||
});
|
||||
});
|
||||
|
||||
describe('recordStatus', () => {
|
||||
test('adds status to currentStatus map', () => {
|
||||
const status = { serviceId: 'svc1', status: 'up', timestamp: new Date().toISOString() };
|
||||
healthChecker.recordStatus('svc1', status);
|
||||
expect(healthChecker.currentStatus.get('svc1')).toEqual(status);
|
||||
});
|
||||
|
||||
test('creates history array for new serviceId', () => {
|
||||
const status = { serviceId: 'new-svc', status: 'up', timestamp: new Date().toISOString() };
|
||||
healthChecker.recordStatus('new-svc', status);
|
||||
expect(healthChecker.history['new-svc']).toHaveLength(1);
|
||||
});
|
||||
|
||||
test('appends to existing history', () => {
|
||||
healthChecker.history['svc1'] = [{ status: 'up', timestamp: new Date().toISOString() }];
|
||||
const status = { status: 'down', timestamp: new Date().toISOString() };
|
||||
healthChecker.recordStatus('svc1', status);
|
||||
expect(healthChecker.history['svc1']).toHaveLength(2);
|
||||
});
|
||||
|
||||
test('emits status-check event', () => {
|
||||
const handler = jest.fn();
|
||||
healthChecker.on('status-check', handler);
|
||||
healthChecker.recordStatus('svc1', { status: 'up', timestamp: new Date().toISOString() });
|
||||
expect(handler).toHaveBeenCalled();
|
||||
healthChecker.removeListener('status-check', handler);
|
||||
});
|
||||
});
|
||||
|
||||
describe('createIncident', () => {
|
||||
test('creates incident with correct structure', () => {
|
||||
const status = { timestamp: new Date().toISOString() };
|
||||
healthChecker.createIncident('svc1', 'outage', 'Service down', status);
|
||||
expect(healthChecker.incidents).toHaveLength(1);
|
||||
expect(healthChecker.incidents[0].serviceId).toBe('svc1');
|
||||
expect(healthChecker.incidents[0].type).toBe('outage');
|
||||
expect(healthChecker.incidents[0].status).toBe('open');
|
||||
expect(healthChecker.incidents[0].severity).toBe('critical');
|
||||
expect(healthChecker.incidents[0].occurrences).toBe(1);
|
||||
});
|
||||
|
||||
test('emits incident-created event', () => {
|
||||
const handler = jest.fn();
|
||||
healthChecker.on('incident-created', handler);
|
||||
healthChecker.createIncident('svc1', 'outage', 'Down', { timestamp: new Date().toISOString() });
|
||||
expect(handler).toHaveBeenCalledWith(expect.objectContaining({ serviceId: 'svc1' }));
|
||||
healthChecker.removeListener('incident-created', handler);
|
||||
});
|
||||
|
||||
test('does not duplicate open incidents of same type', () => {
|
||||
const status = { timestamp: new Date().toISOString() };
|
||||
healthChecker.createIncident('svc1', 'outage', 'Down', status);
|
||||
healthChecker.createIncident('svc1', 'outage', 'Still down', status);
|
||||
expect(healthChecker.incidents).toHaveLength(1);
|
||||
});
|
||||
|
||||
test('increments occurrences on existing open incident', () => {
|
||||
const status = { timestamp: new Date().toISOString() };
|
||||
healthChecker.createIncident('svc1', 'outage', 'Down', status);
|
||||
healthChecker.createIncident('svc1', 'outage', 'Still down', status);
|
||||
expect(healthChecker.incidents[0].occurrences).toBe(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('resolveIncident', () => {
|
||||
test('marks incident as resolved with duration', () => {
|
||||
const created = new Date(Date.now() - 60000).toISOString();
|
||||
const resolved = new Date().toISOString();
|
||||
healthChecker.createIncident('svc1', 'outage', 'Down', { timestamp: created });
|
||||
healthChecker.resolveIncident('svc1', 'outage', { timestamp: resolved });
|
||||
expect(healthChecker.incidents[0].status).toBe('resolved');
|
||||
expect(healthChecker.incidents[0].resolvedAt).toBe(resolved);
|
||||
expect(healthChecker.incidents[0].duration).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test('emits incident-resolved event', () => {
|
||||
const handler = jest.fn();
|
||||
healthChecker.on('incident-resolved', handler);
|
||||
const ts = new Date().toISOString();
|
||||
healthChecker.createIncident('svc1', 'outage', 'Down', { timestamp: ts });
|
||||
healthChecker.resolveIncident('svc1', 'outage', { timestamp: ts });
|
||||
expect(handler).toHaveBeenCalled();
|
||||
healthChecker.removeListener('incident-resolved', handler);
|
||||
});
|
||||
|
||||
test('handles no matching incident gracefully', () => {
|
||||
// Should not throw
|
||||
healthChecker.resolveIncident('nonexistent', 'outage', { timestamp: new Date().toISOString() });
|
||||
expect(healthChecker.incidents).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('configureService / removeService', () => {
|
||||
test('adds service config with defaults', () => {
|
||||
healthChecker.configureService('svc1', { url: 'http://localhost:3000', name: 'Test' });
|
||||
expect(healthChecker.config.services['svc1']).toBeDefined();
|
||||
expect(healthChecker.config.services['svc1'].method).toBe('GET');
|
||||
expect(healthChecker.config.services['svc1'].timeout).toBe(10000);
|
||||
});
|
||||
|
||||
test('removes service and cleans up', () => {
|
||||
healthChecker.configureService('svc1', { url: 'http://localhost:3000' });
|
||||
healthChecker.currentStatus.set('svc1', { status: 'up' });
|
||||
healthChecker.history['svc1'] = [{ status: 'up' }];
|
||||
|
||||
healthChecker.removeService('svc1');
|
||||
expect(healthChecker.config.services['svc1']).toBeUndefined();
|
||||
expect(healthChecker.currentStatus.has('svc1')).toBe(false);
|
||||
expect(healthChecker.history['svc1']).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getOpenIncidents / getIncidentHistory', () => {
|
||||
test('getOpenIncidents returns only open incidents', () => {
|
||||
const ts = new Date().toISOString();
|
||||
healthChecker.createIncident('svc1', 'outage', 'Down', { timestamp: ts });
|
||||
healthChecker.createIncident('svc2', 'slow-response', 'Slow', { timestamp: ts });
|
||||
healthChecker.resolveIncident('svc1', 'outage', { timestamp: ts });
|
||||
expect(healthChecker.getOpenIncidents()).toHaveLength(1);
|
||||
expect(healthChecker.getOpenIncidents()[0].serviceId).toBe('svc2');
|
||||
});
|
||||
|
||||
test('getIncidentHistory returns reverse chronological order', () => {
|
||||
const ts = new Date().toISOString();
|
||||
healthChecker.createIncident('svc1', 'outage', 'First', { timestamp: ts });
|
||||
healthChecker.createIncident('svc2', 'outage', 'Second', { timestamp: ts });
|
||||
const history = healthChecker.getIncidentHistory();
|
||||
expect(history[0].serviceId).toBe('svc2');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getServiceStats', () => {
|
||||
test('returns null for service with no history', () => {
|
||||
expect(healthChecker.getServiceStats('nonexistent')).toBeNull();
|
||||
});
|
||||
|
||||
test('returns correct stats structure', () => {
|
||||
const now = new Date().toISOString();
|
||||
healthChecker.history['svc1'] = [
|
||||
{ status: 'up', responseTime: 100, timestamp: now },
|
||||
{ status: 'up', responseTime: 200, timestamp: now },
|
||||
{ status: 'down', responseTime: 0, timestamp: now },
|
||||
];
|
||||
const stats = healthChecker.getServiceStats('svc1');
|
||||
expect(stats.totalChecks).toBe(3);
|
||||
expect(stats.upChecks).toBe(2);
|
||||
expect(stats.downChecks).toBe(1);
|
||||
expect(stats.responseTime.avg).toBe(100);
|
||||
expect(stats.responseTime.min).toBe(0);
|
||||
expect(stats.responseTime.max).toBe(200);
|
||||
expect(stats.responseTime).toHaveProperty('p95');
|
||||
expect(stats.responseTime).toHaveProperty('p99');
|
||||
});
|
||||
});
|
||||
|
||||
describe('start / stop', () => {
|
||||
test('start sets checking flag', () => {
|
||||
jest.useFakeTimers();
|
||||
healthChecker.start();
|
||||
expect(healthChecker.checking).toBe(true);
|
||||
healthChecker.stop();
|
||||
jest.useRealTimers();
|
||||
});
|
||||
|
||||
test('stop clears interval and checking flag', () => {
|
||||
jest.useFakeTimers();
|
||||
healthChecker.start();
|
||||
healthChecker.stop();
|
||||
expect(healthChecker.checking).toBe(false);
|
||||
expect(healthChecker.checkInterval).toBeNull();
|
||||
jest.useRealTimers();
|
||||
});
|
||||
|
||||
test('start is idempotent', () => {
|
||||
jest.useFakeTimers();
|
||||
healthChecker.start();
|
||||
const firstInterval = healthChecker.checkInterval;
|
||||
healthChecker.start();
|
||||
expect(healthChecker.checkInterval).toBe(firstInterval);
|
||||
healthChecker.stop();
|
||||
jest.useRealTimers();
|
||||
});
|
||||
});
|
||||
@@ -1,727 +0,0 @@
|
||||
const {
|
||||
ValidationError,
|
||||
validateDNSRecord,
|
||||
validateDockerDeployment,
|
||||
validateFilePath,
|
||||
validateVolumePath,
|
||||
validateURL,
|
||||
validateToken,
|
||||
validateServiceConfig,
|
||||
sanitizeString,
|
||||
isValidPort,
|
||||
isPrivateIP,
|
||||
} = require('../input-validator');
|
||||
|
||||
// Helper: extract .errors from ValidationError
|
||||
function getErrors(fn) {
|
||||
try {
|
||||
fn();
|
||||
return null;
|
||||
} catch (e) {
|
||||
return e;
|
||||
}
|
||||
}
|
||||
|
||||
describe('ValidationError', () => {
|
||||
test('creates error with message and field', () => {
|
||||
const err = new ValidationError('bad input', 'name');
|
||||
expect(err.message).toBe('bad input');
|
||||
expect(err.field).toBe('name');
|
||||
});
|
||||
|
||||
test('has statusCode 400', () => {
|
||||
expect(new ValidationError('x').statusCode).toBe(400);
|
||||
});
|
||||
|
||||
test('has name "ValidationError"', () => {
|
||||
expect(new ValidationError('x').name).toBe('ValidationError');
|
||||
});
|
||||
|
||||
test('defaults field to null', () => {
|
||||
expect(new ValidationError('x').field).toBeNull();
|
||||
});
|
||||
|
||||
test('is instance of Error', () => {
|
||||
expect(new ValidationError('x')).toBeInstanceOf(Error);
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateDNSRecord', () => {
|
||||
const valid = { subdomain: 'myapp', ip: '192.168.1.1' };
|
||||
|
||||
describe('valid inputs', () => {
|
||||
test('accepts valid subdomain and ip', () => {
|
||||
const result = validateDNSRecord(valid);
|
||||
expect(result.subdomain).toBe('myapp');
|
||||
expect(result.ip).toBe('192.168.1.1');
|
||||
});
|
||||
|
||||
test('returns sanitized lowercase output', () => {
|
||||
const result = validateDNSRecord({ subdomain: 'MyApp', ip: '10.0.0.1' });
|
||||
expect(result.subdomain).toBe('myapp');
|
||||
});
|
||||
|
||||
test('defaults ttl to 3600 when not provided', () => {
|
||||
expect(validateDNSRecord(valid).ttl).toBe(3600);
|
||||
});
|
||||
|
||||
test('accepts explicit ttl', () => {
|
||||
expect(validateDNSRecord({ ...valid, ttl: 300 }).ttl).toBe(300);
|
||||
});
|
||||
|
||||
test('accepts IPv6 addresses', () => {
|
||||
const result = validateDNSRecord({ subdomain: 'test', ip: '::1' });
|
||||
expect(result.ip).toBe('::1');
|
||||
});
|
||||
|
||||
test('accepts valid domain', () => {
|
||||
const result = validateDNSRecord({ ...valid, domain: 'example.local' });
|
||||
expect(result.domain).toBe('example.local');
|
||||
});
|
||||
|
||||
test('returns null domain when not provided', () => {
|
||||
expect(validateDNSRecord(valid).domain).toBeNull();
|
||||
});
|
||||
|
||||
test('lowercases and trims subdomain in output', () => {
|
||||
const result = validateDNSRecord({ subdomain: 'MyApp', ip: '10.0.0.1' });
|
||||
expect(result.subdomain).toBe('myapp');
|
||||
});
|
||||
});
|
||||
|
||||
describe('subdomain validation', () => {
|
||||
test('rejects missing subdomain', () => {
|
||||
const err = getErrors(() => validateDNSRecord({ ip: '1.2.3.4' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects non-string subdomain', () => {
|
||||
const err = getErrors(() => validateDNSRecord({ subdomain: 123, ip: '1.2.3.4' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects subdomain starting with hyphen', () => {
|
||||
const err = getErrors(() => validateDNSRecord({ subdomain: '-bad', ip: '1.2.3.4' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects subdomain ending with hyphen', () => {
|
||||
const err = getErrors(() => validateDNSRecord({ subdomain: 'bad-', ip: '1.2.3.4' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('accepts single-character subdomain', () => {
|
||||
expect(validateDNSRecord({ subdomain: 'a', ip: '1.2.3.4' }).subdomain).toBe('a');
|
||||
});
|
||||
|
||||
test('accepts subdomain with hyphens in middle', () => {
|
||||
expect(validateDNSRecord({ subdomain: 'my-app', ip: '1.2.3.4' }).subdomain).toBe('my-app');
|
||||
});
|
||||
|
||||
test('rejects subdomain exceeding 63 characters', () => {
|
||||
const long = 'a'.repeat(64);
|
||||
const err = getErrors(() => validateDNSRecord({ subdomain: long, ip: '1.2.3.4' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
});
|
||||
|
||||
describe('injection prevention', () => {
|
||||
const chars = [';', '&', '|', '`', '$', '(', ')', '<', '>', '\n', '\r', '\\'];
|
||||
chars.forEach(char => {
|
||||
test(`rejects "${char === '\n' ? '\\n' : char === '\r' ? '\\r' : char}" in subdomain`, () => {
|
||||
const err = getErrors(() => validateDNSRecord({ subdomain: `test${char}bad`, ip: '1.2.3.4' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('IP validation', () => {
|
||||
test('rejects missing IP', () => {
|
||||
const err = getErrors(() => validateDNSRecord({ subdomain: 'test' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects invalid IP format', () => {
|
||||
const err = getErrors(() => validateDNSRecord({ subdomain: 'test', ip: '999.999.999.999' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects non-string IP', () => {
|
||||
const err = getErrors(() => validateDNSRecord({ subdomain: 'test', ip: 12345 }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('blocks private IP when blockPrivateIPs is true', () => {
|
||||
const err = getErrors(() => validateDNSRecord({ subdomain: 'test', ip: '192.168.1.1', blockPrivateIPs: true }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('allows private IP when blockPrivateIPs is absent', () => {
|
||||
expect(validateDNSRecord({ subdomain: 'test', ip: '192.168.1.1' }).ip).toBe('192.168.1.1');
|
||||
});
|
||||
});
|
||||
|
||||
describe('TTL validation', () => {
|
||||
test('rejects TTL below 60', () => {
|
||||
const err = getErrors(() => validateDNSRecord({ ...valid, ttl: 10 }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects TTL above 86400', () => {
|
||||
const err = getErrors(() => validateDNSRecord({ ...valid, ttl: 100000 }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects non-numeric TTL', () => {
|
||||
const err = getErrors(() => validateDNSRecord({ ...valid, ttl: 'abc' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('accepts TTL at lower boundary (60)', () => {
|
||||
expect(validateDNSRecord({ ...valid, ttl: 60 }).ttl).toBe(60);
|
||||
});
|
||||
|
||||
test('accepts TTL at upper boundary (86400)', () => {
|
||||
expect(validateDNSRecord({ ...valid, ttl: 86400 }).ttl).toBe(86400);
|
||||
});
|
||||
});
|
||||
|
||||
describe('error aggregation', () => {
|
||||
test('returns multiple errors for multiple invalid fields', () => {
|
||||
const err = getErrors(() => validateDNSRecord({ ttl: 1 }));
|
||||
expect(err.errors.length).toBeGreaterThan(1);
|
||||
});
|
||||
|
||||
test('throws ValidationError with .errors array', () => {
|
||||
const err = getErrors(() => validateDNSRecord({}));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
expect(Array.isArray(err.errors)).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateDockerDeployment', () => {
|
||||
const valid = { name: 'myapp', image: 'nginx:latest' };
|
||||
|
||||
describe('valid inputs', () => {
|
||||
test('accepts valid name and image', () => {
|
||||
const result = validateDockerDeployment(valid);
|
||||
expect(result.name).toBe('myapp');
|
||||
expect(result.image).toBe('nginx:latest');
|
||||
});
|
||||
|
||||
test('returns defaults for optional fields', () => {
|
||||
const result = validateDockerDeployment(valid);
|
||||
expect(result.ports).toEqual([]);
|
||||
expect(result.volumes).toEqual([]);
|
||||
expect(result.environment).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('container name validation', () => {
|
||||
test('rejects missing name', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ image: 'nginx' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects name starting with special char', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ name: '-bad', image: 'nginx' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects name exceeding 255 characters', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ name: 'a'.repeat(256), image: 'nginx' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('accepts name with underscores, periods, hyphens', () => {
|
||||
const result = validateDockerDeployment({ name: 'my_app.v1-test', image: 'nginx' });
|
||||
expect(result.name).toBe('my_app.v1-test');
|
||||
});
|
||||
});
|
||||
|
||||
describe('image validation', () => {
|
||||
test('rejects missing image', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ name: 'app' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('accepts simple image', () => {
|
||||
expect(validateDockerDeployment({ name: 'a', image: 'alpine' }).image).toBe('alpine');
|
||||
});
|
||||
|
||||
test('accepts image with tag', () => {
|
||||
expect(validateDockerDeployment({ name: 'a', image: 'nginx:latest' }).image).toBe('nginx:latest');
|
||||
});
|
||||
|
||||
test('accepts fully qualified image', () => {
|
||||
const result = validateDockerDeployment({ name: 'a', image: 'docker.io/library/nginx:1.21' });
|
||||
expect(result.image).toBe('docker.io/library/nginx:1.21');
|
||||
});
|
||||
|
||||
test('rejects image with semicolon', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ name: 'a', image: 'nginx;rm -rf /' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects image with $( subshell', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ name: 'a', image: 'nginx$(evil)' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects image exceeding 512 characters', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ name: 'a', image: 'a'.repeat(513) }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
});
|
||||
|
||||
describe('ports validation', () => {
|
||||
test('rejects non-array ports', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ ...valid, ports: 'bad' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('accepts string port format "8080:80"', () => {
|
||||
const result = validateDockerDeployment({ ...valid, ports: ['8080:80'] });
|
||||
expect(result.ports).toEqual(['8080:80']);
|
||||
});
|
||||
|
||||
test('accepts port format with protocol "8080:80/tcp"', () => {
|
||||
const result = validateDockerDeployment({ ...valid, ports: ['8080:80/tcp'] });
|
||||
expect(result.ports).toEqual(['8080:80/tcp']);
|
||||
});
|
||||
|
||||
test('rejects invalid port format', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ ...valid, ports: ['bad'] }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects port numbers > 65535', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ ...valid, ports: ['70000:80'] }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects port numbers < 1', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ ...valid, ports: ['0:80'] }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('accepts numeric port values', () => {
|
||||
const result = validateDockerDeployment({ ...valid, ports: [8080] });
|
||||
expect(result.ports).toEqual([8080]);
|
||||
});
|
||||
|
||||
test('rejects non-string non-number port values', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ ...valid, ports: [{}] }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
});
|
||||
|
||||
describe('volumes validation', () => {
|
||||
test('rejects non-array volumes', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ ...valid, volumes: 'bad' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects non-string volume entries', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ ...valid, volumes: [123] }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
});
|
||||
|
||||
describe('environment validation', () => {
|
||||
test('rejects non-object environment', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ ...valid, environment: 'bad' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects array as environment', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ ...valid, environment: [] }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects invalid env var names', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ ...valid, environment: { '1BAD': 'val' } }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('accepts valid env var names', () => {
|
||||
const result = validateDockerDeployment({ ...valid, environment: { MY_VAR: 'test', _under: '1' } });
|
||||
expect(result.environment).toEqual({ MY_VAR: 'test', _under: '1' });
|
||||
});
|
||||
|
||||
test('accepts string, number, boolean values', () => {
|
||||
const env = { A: 'str', B: 42, C: true };
|
||||
const result = validateDockerDeployment({ ...valid, environment: env });
|
||||
expect(result.environment).toEqual(env);
|
||||
});
|
||||
|
||||
test('rejects object values', () => {
|
||||
const err = getErrors(() => validateDockerDeployment({ ...valid, environment: { X: { nested: true } } }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateFilePath', () => {
|
||||
const isWindows = process.platform === 'win32';
|
||||
|
||||
test('rejects empty path', () => {
|
||||
expect(() => validateFilePath('')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects null path', () => {
|
||||
expect(() => validateFilePath(null)).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects path with ~', () => {
|
||||
expect(() => validateFilePath('~/secrets')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
// On Windows, path.normalize resolves '..' so the normalized path may not contain '..'
|
||||
// On Linux, '/app/../etc/passwd' normalizes to '/etc/passwd' which is blocked
|
||||
test('blocks C:\\Windows path', () => {
|
||||
expect(() => validateFilePath('C:\\Windows\\System32')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('blocks C:\\Program Files path', () => {
|
||||
expect(() => validateFilePath('C:\\Program Files\\test')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
if (!isWindows) {
|
||||
test('rejects path with ..', () => {
|
||||
expect(() => validateFilePath('/app/../etc/passwd')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('blocks /etc path', () => {
|
||||
expect(() => validateFilePath('/etc/passwd')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('blocks /proc path', () => {
|
||||
expect(() => validateFilePath('/proc/self/environ')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('blocks /sys path', () => {
|
||||
expect(() => validateFilePath('/sys/class')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('blocks /root path', () => {
|
||||
expect(() => validateFilePath('/root/.ssh')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('blocks /var/run path', () => {
|
||||
expect(() => validateFilePath('/var/run/docker.sock')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('blocks /var/lib/docker path', () => {
|
||||
expect(() => validateFilePath('/var/lib/docker/containers')).toThrow(ValidationError);
|
||||
});
|
||||
}
|
||||
|
||||
test('returns normalized path for valid input', () => {
|
||||
const testPath = isWindows ? 'D:\\app\\data\\config' : '/app/data/config';
|
||||
const result = validateFilePath(testPath);
|
||||
expect(result).toBeTruthy();
|
||||
});
|
||||
|
||||
test('enforces allowedBasePaths when specified', () => {
|
||||
const testPath = isWindows ? 'D:\\app\\data' : '/app/data';
|
||||
const allowedBase = isWindows ? 'D:\\opt' : '/opt';
|
||||
expect(() => validateFilePath(testPath, [allowedBase])).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('accepts path within allowedBasePaths', () => {
|
||||
const testPath = isWindows ? 'D:\\opt\\myapp\\config' : '/opt/myapp/config';
|
||||
const allowedBase = isWindows ? 'D:\\opt' : '/opt';
|
||||
const result = validateFilePath(testPath, [allowedBase]);
|
||||
expect(result).toBeTruthy();
|
||||
});
|
||||
|
||||
test('accepts any path when allowedBasePaths is empty', () => {
|
||||
const testPath = isWindows ? 'D:\\app\\data' : '/app/data';
|
||||
const result = validateFilePath(testPath, []);
|
||||
expect(result).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateVolumePath', () => {
|
||||
test('rejects invalid volume format', () => {
|
||||
const errors = validateVolumePath('not-a-volume', 0);
|
||||
expect(errors.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test('rejects container path with ..', () => {
|
||||
const errors = validateVolumePath('/app/data:/../etc:ro', 0);
|
||||
expect(errors.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test('accepts valid modes: ro, rw, z, Z', () => {
|
||||
['ro', 'rw', 'z', 'Z'].forEach(mode => {
|
||||
const errors = validateVolumePath(`/app/data:/container/path:${mode}`, 0);
|
||||
// Filter to only mode-related errors
|
||||
const modeErrors = errors.filter(e => e.field && e.field.includes('mode'));
|
||||
expect(modeErrors).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
test('accepts valid volume without mode', () => {
|
||||
const errors = validateVolumePath('/app/data:/container/path', 0);
|
||||
// Should have no container path errors
|
||||
const containerErrors = errors.filter(e => e.field && e.field.includes('containerPath'));
|
||||
expect(containerErrors).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateURL', () => {
|
||||
test('rejects empty URL', () => {
|
||||
expect(() => validateURL('')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects null URL', () => {
|
||||
expect(() => validateURL(null)).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('accepts valid https URL', () => {
|
||||
expect(validateURL('https://example.com')).toBe('https://example.com');
|
||||
});
|
||||
|
||||
test('accepts valid http URL', () => {
|
||||
expect(validateURL('http://example.com')).toBe('http://example.com');
|
||||
});
|
||||
|
||||
test('rejects non-URL strings', () => {
|
||||
expect(() => validateURL('not a url')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('blocks localhost when blockPrivate is true', () => {
|
||||
expect(() => validateURL('http://localhost:3000', { blockPrivate: true })).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('blocks 127.0.0.1 when blockPrivate is true', () => {
|
||||
expect(() => validateURL('http://127.0.0.1:3000', { blockPrivate: true })).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('blocks private IPs when blockPrivate is true', () => {
|
||||
expect(() => validateURL('http://192.168.1.1', { blockPrivate: true })).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('allows private IPs when blockPrivate is false', () => {
|
||||
expect(validateURL('http://192.168.1.1')).toBe('http://192.168.1.1');
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateToken', () => {
|
||||
test('rejects empty token', () => {
|
||||
expect(() => validateToken('')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects null token', () => {
|
||||
expect(() => validateToken(null)).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects token shorter than 8 chars', () => {
|
||||
expect(() => validateToken('short')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects token longer than 512 chars', () => {
|
||||
expect(() => validateToken('a'.repeat(513))).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects token with semicolon', () => {
|
||||
expect(() => validateToken('token123;evil')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects token with $( subshell', () => {
|
||||
expect(() => validateToken('token123$(evil)')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects token with &&', () => {
|
||||
expect(() => validateToken('token123&&evil')).toThrow(ValidationError);
|
||||
});
|
||||
|
||||
test('accepts valid alphanumeric token', () => {
|
||||
expect(validateToken('abcdef12345678')).toBe('abcdef12345678');
|
||||
});
|
||||
|
||||
test('trims whitespace', () => {
|
||||
expect(validateToken(' abcdef12345678 ')).toBe('abcdef12345678');
|
||||
});
|
||||
|
||||
test('accepts token at minimum length (8)', () => {
|
||||
expect(validateToken('12345678')).toBe('12345678');
|
||||
});
|
||||
|
||||
test('accepts token at maximum length (512)', () => {
|
||||
const token = 'a'.repeat(512);
|
||||
expect(validateToken(token)).toBe(token);
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateServiceConfig', () => {
|
||||
const valid = { id: 'my-service', name: 'My Service' };
|
||||
|
||||
test('accepts valid service config', () => {
|
||||
const result = validateServiceConfig(valid);
|
||||
expect(result.id).toBe('my-service');
|
||||
});
|
||||
|
||||
test('rejects missing ID', () => {
|
||||
const err = getErrors(() => validateServiceConfig({ name: 'Test' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects invalid ID format', () => {
|
||||
const err = getErrors(() => validateServiceConfig({ id: 'bad id!', name: 'Test' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects missing name', () => {
|
||||
const err = getErrors(() => validateServiceConfig({ id: 'test' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('rejects name exceeding 100 chars', () => {
|
||||
const err = getErrors(() => validateServiceConfig({ id: 'test', name: 'a'.repeat(101) }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('validates URL when present', () => {
|
||||
const err = getErrors(() => validateServiceConfig({ ...valid, url: 'not a url' }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('validates port when present', () => {
|
||||
const err = getErrors(() => validateServiceConfig({ ...valid, port: 99999 }));
|
||||
expect(err).toBeInstanceOf(ValidationError);
|
||||
});
|
||||
|
||||
test('accepts valid URL and port', () => {
|
||||
const result = validateServiceConfig({ ...valid, url: 'http://example.com', port: 8080 });
|
||||
expect(result.id).toBe('my-service');
|
||||
});
|
||||
|
||||
test('aggregates multiple errors', () => {
|
||||
const err = getErrors(() => validateServiceConfig({}));
|
||||
expect(err.errors.length).toBeGreaterThan(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isValidPort', () => {
|
||||
test('accepts port 1', () => {
|
||||
expect(isValidPort(1)).toBe(true);
|
||||
});
|
||||
|
||||
test('accepts port 65535', () => {
|
||||
expect(isValidPort(65535)).toBe(true);
|
||||
});
|
||||
|
||||
test('rejects port 0', () => {
|
||||
expect(isValidPort(0)).toBe(false);
|
||||
});
|
||||
|
||||
test('rejects port 65536', () => {
|
||||
expect(isValidPort(65536)).toBe(false);
|
||||
});
|
||||
|
||||
test('accepts string port "8080"', () => {
|
||||
expect(isValidPort('8080')).toBe(true);
|
||||
});
|
||||
|
||||
test('rejects NaN', () => {
|
||||
expect(isValidPort('abc')).toBe(false);
|
||||
});
|
||||
|
||||
test('rejects negative port', () => {
|
||||
expect(isValidPort(-1)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isPrivateIP', () => {
|
||||
test('detects 10.x.x.x as private', () => {
|
||||
expect(isPrivateIP('10.0.0.1')).toBe(true);
|
||||
expect(isPrivateIP('10.255.255.255')).toBe(true);
|
||||
});
|
||||
|
||||
test('detects 172.16-31.x.x as private', () => {
|
||||
expect(isPrivateIP('172.16.0.1')).toBe(true);
|
||||
expect(isPrivateIP('172.31.255.255')).toBe(true);
|
||||
});
|
||||
|
||||
test('does not flag 172.15.x.x as private', () => {
|
||||
expect(isPrivateIP('172.15.0.1')).toBe(false);
|
||||
});
|
||||
|
||||
test('does not flag 172.32.x.x as private', () => {
|
||||
expect(isPrivateIP('172.32.0.1')).toBe(false);
|
||||
});
|
||||
|
||||
test('detects 192.168.x.x as private', () => {
|
||||
expect(isPrivateIP('192.168.1.1')).toBe(true);
|
||||
});
|
||||
|
||||
test('detects 127.x.x.x as private', () => {
|
||||
expect(isPrivateIP('127.0.0.1')).toBe(true);
|
||||
expect(isPrivateIP('127.255.255.255')).toBe(true);
|
||||
});
|
||||
|
||||
test('detects 169.254.x.x as private', () => {
|
||||
expect(isPrivateIP('169.254.1.1')).toBe(true);
|
||||
});
|
||||
|
||||
test('detects ::1 as private', () => {
|
||||
expect(isPrivateIP('::1')).toBe(true);
|
||||
});
|
||||
|
||||
test('detects fc00: as private', () => {
|
||||
expect(isPrivateIP('fc00::1')).toBe(true);
|
||||
});
|
||||
|
||||
test('detects fe80: as private', () => {
|
||||
expect(isPrivateIP('fe80::1')).toBe(true);
|
||||
});
|
||||
|
||||
test('identifies 8.8.8.8 as public', () => {
|
||||
expect(isPrivateIP('8.8.8.8')).toBe(false);
|
||||
});
|
||||
|
||||
test('identifies 1.1.1.1 as public', () => {
|
||||
expect(isPrivateIP('1.1.1.1')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('sanitizeString', () => {
|
||||
test('escapes < to <', () => {
|
||||
expect(sanitizeString('<script>')).toBe('<script>');
|
||||
});
|
||||
|
||||
test('escapes > to >', () => {
|
||||
expect(sanitizeString('a>b')).toBe('a>b');
|
||||
});
|
||||
|
||||
test('escapes single quote to '', () => {
|
||||
expect(sanitizeString("it's")).toBe('it's');
|
||||
});
|
||||
|
||||
test('escapes double quote to "', () => {
|
||||
expect(sanitizeString('say "hi"')).toBe('say "hi"');
|
||||
});
|
||||
|
||||
test('truncates to maxLength', () => {
|
||||
expect(sanitizeString('hello world', 5)).toBe('hello');
|
||||
});
|
||||
|
||||
test('returns empty string for non-string input', () => {
|
||||
expect(sanitizeString(123)).toBe('');
|
||||
expect(sanitizeString(null)).toBe('');
|
||||
expect(sanitizeString(undefined)).toBe('');
|
||||
});
|
||||
|
||||
test('uses default maxLength of 1000', () => {
|
||||
const long = 'a'.repeat(1500);
|
||||
expect(sanitizeString(long).length).toBe(1000);
|
||||
});
|
||||
|
||||
test('returns safe strings unchanged', () => {
|
||||
expect(sanitizeString('hello world')).toBe('hello world');
|
||||
});
|
||||
});
|
||||
@@ -1,564 +0,0 @@
|
||||
/**
|
||||
* Integration Tests
|
||||
*
|
||||
* Tests multi-component workflows and end-to-end scenarios
|
||||
* Validates that all DashCaddy components work together correctly
|
||||
*/
|
||||
|
||||
const request = require('supertest');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
// Create test instance with isolated environment
|
||||
const testServicesFile = path.join(os.tmpdir(), `integration-services-${Date.now()}.json`);
|
||||
const testConfigFile = path.join(os.tmpdir(), `integration-config-${Date.now()}.json`);
|
||||
const testDnsCredsFile = path.join(os.tmpdir(), `integration-dns-${Date.now()}.json`);
|
||||
const testCaddyfile = path.join(os.tmpdir(), `integration-Caddyfile-${Date.now()}`);
|
||||
|
||||
// Set test environment
|
||||
process.env.SERVICES_FILE = testServicesFile;
|
||||
process.env.CONFIG_FILE = testConfigFile;
|
||||
process.env.DNS_CREDENTIALS_FILE = testDnsCredsFile;
|
||||
process.env.CADDYFILE_PATH = testCaddyfile;
|
||||
process.env.CADDY_ADMIN_URL = 'http://localhost:2019';
|
||||
process.env.ENABLE_HEALTH_CHECKER = 'false';
|
||||
process.env.NODE_ENV = 'test';
|
||||
|
||||
// Initialize test files
|
||||
fs.writeFileSync(testServicesFile, '[]', 'utf8');
|
||||
fs.writeFileSync(testConfigFile, '{"domain": "test.local"}', 'utf8');
|
||||
fs.writeFileSync(testDnsCredsFile, '{}', 'utf8');
|
||||
fs.writeFileSync(testCaddyfile, '# Test Caddyfile\n', 'utf8');
|
||||
|
||||
// Require app after environment setup
|
||||
const app = require('../server');
|
||||
|
||||
describe('Integration Tests', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
// Reset state through the API to respect file locks
|
||||
await request(app).put('/api/services').send([]);
|
||||
fs.writeFileSync(testConfigFile, '{"domain": "test.local"}', 'utf8');
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
// Cleanup test files
|
||||
try {
|
||||
fs.unlinkSync(testServicesFile);
|
||||
fs.unlinkSync(testConfigFile);
|
||||
fs.unlinkSync(testDnsCredsFile);
|
||||
fs.unlinkSync(testCaddyfile);
|
||||
} catch (e) {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
});
|
||||
|
||||
describe('End-to-End Service Deployment', () => {
|
||||
test('should complete full service lifecycle: add → configure → verify → delete', async () => {
|
||||
// Step 1: Add a new service
|
||||
const newService = {
|
||||
id: 'test-app',
|
||||
name: 'Test Application',
|
||||
logo: '/assets/test.png',
|
||||
url: 'https://test.test.local',
|
||||
};
|
||||
|
||||
const addRes = await request(app)
|
||||
.post('/api/services')
|
||||
.send(newService);
|
||||
|
||||
expect(addRes.statusCode).toBe(200);
|
||||
expect(addRes.body.success).toBe(true);
|
||||
|
||||
// Step 2: Verify service appears in list
|
||||
const listRes = await request(app).get('/api/services');
|
||||
expect(listRes.statusCode).toBe(200);
|
||||
expect(listRes.body.length).toBe(1);
|
||||
expect(listRes.body[0].id).toBe('test-app');
|
||||
|
||||
// Step 3: Update service configuration
|
||||
const updatedServices = [{
|
||||
...newService,
|
||||
status: 'online',
|
||||
responseTime: 150,
|
||||
}];
|
||||
|
||||
const updateRes = await request(app)
|
||||
.put('/api/services')
|
||||
.send(updatedServices);
|
||||
|
||||
expect(updateRes.statusCode).toBe(200);
|
||||
|
||||
// Step 4: Verify update
|
||||
const verifyRes = await request(app).get('/api/services');
|
||||
expect(verifyRes.body[0].status).toBe('online');
|
||||
|
||||
// Step 5: Delete service
|
||||
const deleteRes = await request(app).delete('/api/services/test-app');
|
||||
expect(deleteRes.statusCode).toBe(200);
|
||||
|
||||
// Step 6: Verify deletion
|
||||
const finalRes = await request(app).get('/api/services');
|
||||
expect(finalRes.body.length).toBe(0);
|
||||
});
|
||||
|
||||
test('should handle app deployment workflow: template → configure → deploy', async () => {
|
||||
// Step 1: Get app template
|
||||
const templateRes = await request(app).get('/api/apps/templates/jellyfin');
|
||||
expect(templateRes.statusCode).toBe(200);
|
||||
expect(templateRes.body.success).toBe(true);
|
||||
const template = templateRes.body.template;
|
||||
|
||||
// Step 2: Configure app from template
|
||||
const appConfig = {
|
||||
id: 'jellyfin',
|
||||
name: template.name,
|
||||
logo: template.logo,
|
||||
port: 8096,
|
||||
subdomain: 'jellyfin',
|
||||
};
|
||||
|
||||
// Step 3: Add configured service
|
||||
const deployRes = await request(app)
|
||||
.post('/api/services')
|
||||
.send(appConfig);
|
||||
|
||||
expect(deployRes.statusCode).toBe(200);
|
||||
|
||||
// Step 4: Verify service is listed
|
||||
const servicesRes = await request(app).get('/api/services');
|
||||
expect(servicesRes.body).toContainEqual(
|
||||
expect.objectContaining({ id: 'jellyfin' }),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Multi-Service Management', () => {
|
||||
test('should handle multiple services concurrently', async () => {
|
||||
// Deploy 5 services simultaneously (reduced from 10 to avoid overwhelming)
|
||||
const services = Array.from({ length: 5 }, (_, i) => ({
|
||||
id: `concurrent-${i}`,
|
||||
name: `Concurrent Service ${i}`,
|
||||
logo: `/assets/service-${i}.png`,
|
||||
}));
|
||||
|
||||
const deployPromises = services.map(service =>
|
||||
request(app).post('/api/services').send(service),
|
||||
);
|
||||
|
||||
const results = await Promise.all(deployPromises);
|
||||
|
||||
// All deployments should succeed
|
||||
results.forEach((res, index) => {
|
||||
if (res.statusCode !== 200) {
|
||||
console.log(`Service ${index} failed:`, res.body);
|
||||
}
|
||||
expect(res.statusCode).toBe(200);
|
||||
});
|
||||
|
||||
// Verify all services are listed
|
||||
const listRes = await request(app).get('/api/services');
|
||||
expect(listRes.body.length).toBe(5);
|
||||
});
|
||||
|
||||
test('should handle bulk import and individual updates', async () => {
|
||||
// Step 1: Bulk import services
|
||||
const bulkServices = [
|
||||
{ id: 'plex', name: 'Plex' },
|
||||
{ id: 'jellyfin', name: 'Jellyfin' },
|
||||
{ id: 'emby', name: 'Emby' },
|
||||
];
|
||||
|
||||
const importRes = await request(app)
|
||||
.put('/api/services')
|
||||
.send(bulkServices);
|
||||
|
||||
expect(importRes.statusCode).toBe(200);
|
||||
|
||||
// Step 2: Update individual service
|
||||
const updatedServices = [
|
||||
{ id: 'plex', name: 'Plex', status: 'online' },
|
||||
{ id: 'jellyfin', name: 'Jellyfin' },
|
||||
{ id: 'emby', name: 'Emby' },
|
||||
];
|
||||
|
||||
await request(app).put('/api/services').send(updatedServices);
|
||||
|
||||
// Step 3: Verify specific service was updated
|
||||
const services = JSON.parse(fs.readFileSync(testServicesFile, 'utf8'));
|
||||
const plexService = services.find(s => s.id === 'plex');
|
||||
expect(plexService.status).toBe('online');
|
||||
});
|
||||
|
||||
test('should maintain data consistency across operations', async () => {
|
||||
// Perform series of operations
|
||||
await request(app).post('/api/services').send({ id: 's1', name: 'Service 1' });
|
||||
await request(app).post('/api/services').send({ id: 's2', name: 'Service 2' });
|
||||
await request(app).post('/api/services').send({ id: 's3', name: 'Service 3' });
|
||||
|
||||
// Verify count
|
||||
let services = JSON.parse(fs.readFileSync(testServicesFile, 'utf8'));
|
||||
expect(services.length).toBe(3);
|
||||
|
||||
// Delete one
|
||||
await request(app).delete('/api/services/s2');
|
||||
|
||||
// Verify count and content
|
||||
services = JSON.parse(fs.readFileSync(testServicesFile, 'utf8'));
|
||||
expect(services.length).toBe(2);
|
||||
expect(services.find(s => s.id === 's2')).toBeUndefined();
|
||||
expect(services.find(s => s.id === 's1')).toBeDefined();
|
||||
expect(services.find(s => s.id === 's3')).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Configuration Management Integration', () => {
|
||||
test('should coordinate config changes with service updates', async () => {
|
||||
// Step 1: Set initial config
|
||||
const config = {
|
||||
domain: 'example.local',
|
||||
theme: 'dark',
|
||||
enableHealthCheck: false,
|
||||
};
|
||||
|
||||
const configRes = await request(app)
|
||||
.post('/api/config')
|
||||
.send(config);
|
||||
|
||||
expect(configRes.statusCode).toBe(200);
|
||||
|
||||
// Step 2: Add service that uses config
|
||||
const service = {
|
||||
id: 'test',
|
||||
name: 'Test Service',
|
||||
subdomain: 'test',
|
||||
};
|
||||
|
||||
await request(app).post('/api/services').send(service);
|
||||
|
||||
// Step 3: Verify config persists
|
||||
const getConfigRes = await request(app).get('/api/config');
|
||||
expect(getConfigRes.body.domain).toBe('example.local');
|
||||
|
||||
// Step 4: Update config
|
||||
const newConfig = { ...config, theme: 'light' };
|
||||
await request(app).post('/api/config').send(newConfig);
|
||||
|
||||
// Step 5: Verify service still exists and config updated
|
||||
const servicesRes = await request(app).get('/api/services');
|
||||
const configCheckRes = await request(app).get('/api/config');
|
||||
|
||||
expect(servicesRes.body.length).toBe(1);
|
||||
expect(configCheckRes.body.theme).toBe('light');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Template Discovery and Deployment', () => {
|
||||
test('should list templates, select one, and deploy', async () => {
|
||||
// Step 1: Get all templates
|
||||
const templatesRes = await request(app).get('/api/apps/templates');
|
||||
expect(templatesRes.statusCode).toBe(200);
|
||||
expect(Object.keys(templatesRes.body.templates).length).toBeGreaterThan(50);
|
||||
|
||||
// Step 2: Verify categories exist (format may vary)
|
||||
expect(templatesRes.body).toHaveProperty('categories');
|
||||
const categories = templatesRes.body.categories;
|
||||
// Categories might be an array or object depending on implementation
|
||||
expect(categories).toBeTruthy();
|
||||
|
||||
// Step 3: Select a specific template
|
||||
const templateIds = Object.keys(templatesRes.body.templates);
|
||||
const firstTemplateId = templateIds[0];
|
||||
|
||||
const singleTemplateRes = await request(app)
|
||||
.get(`/api/apps/templates/${firstTemplateId}`);
|
||||
|
||||
expect(singleTemplateRes.statusCode).toBe(200);
|
||||
expect(singleTemplateRes.body.template).toHaveProperty('name');
|
||||
expect(singleTemplateRes.body.template).toHaveProperty('docker');
|
||||
|
||||
// Step 4: Deploy service from template
|
||||
const service = {
|
||||
id: firstTemplateId,
|
||||
name: singleTemplateRes.body.template.name,
|
||||
logo: singleTemplateRes.body.template.logo,
|
||||
};
|
||||
|
||||
const deployRes = await request(app)
|
||||
.post('/api/services')
|
||||
.send(service);
|
||||
|
||||
expect(deployRes.statusCode).toBe(200);
|
||||
});
|
||||
|
||||
test('should handle template with complex configuration', async () => {
|
||||
// Get a complex template (Plex has environment variables, volumes, etc.)
|
||||
const templateRes = await request(app).get('/api/apps/templates/plex');
|
||||
expect(templateRes.statusCode).toBe(200);
|
||||
|
||||
const template = templateRes.body.template;
|
||||
|
||||
// Verify template has complex config
|
||||
expect(template.docker).toHaveProperty('image');
|
||||
expect(template.docker).toHaveProperty('environment');
|
||||
expect(template.docker).toHaveProperty('volumes');
|
||||
|
||||
// Deploy with configuration
|
||||
const service = {
|
||||
id: 'plex-prod',
|
||||
name: 'Plex Production',
|
||||
logo: template.logo,
|
||||
port: 32400,
|
||||
subdomain: 'plex',
|
||||
};
|
||||
|
||||
const deployRes = await request(app)
|
||||
.post('/api/services')
|
||||
.send(service);
|
||||
|
||||
expect(deployRes.statusCode).toBe(200);
|
||||
|
||||
// Verify service exists
|
||||
const servicesRes = await request(app).get('/api/services');
|
||||
expect(servicesRes.body).toContainEqual(
|
||||
expect.objectContaining({ id: 'plex-prod' }),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Error Recovery and Resilience', () => {
|
||||
test('should recover from invalid service data', async () => {
|
||||
// Add valid service
|
||||
await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'valid', name: 'Valid Service' });
|
||||
|
||||
// Try to add invalid service
|
||||
const invalidRes = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'invalid' }); // Missing required 'name' field
|
||||
|
||||
expect(invalidRes.statusCode).toBe(400);
|
||||
|
||||
// Verify valid service still exists
|
||||
const servicesRes = await request(app).get('/api/services');
|
||||
expect(servicesRes.body.length).toBe(1);
|
||||
expect(servicesRes.body[0].id).toBe('valid');
|
||||
});
|
||||
|
||||
test('should handle file corruption gracefully', async () => {
|
||||
// Add some services
|
||||
await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 's1', name: 'Service 1' });
|
||||
|
||||
// Simulate file corruption (invalid JSON)
|
||||
fs.writeFileSync(testServicesFile, '{ invalid json }', 'utf8');
|
||||
|
||||
// API should handle this gracefully
|
||||
const res = await request(app).get('/api/services');
|
||||
|
||||
// Should either return error or empty array (depending on implementation)
|
||||
expect([200, 500]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('should maintain consistency during concurrent modifications', async () => {
|
||||
// Start with empty state
|
||||
const initialServices = [
|
||||
{ id: 'base1', name: 'Base 1' },
|
||||
{ id: 'base2', name: 'Base 2' },
|
||||
];
|
||||
|
||||
await request(app).put('/api/services').send(initialServices);
|
||||
|
||||
// Perform concurrent operations
|
||||
const operations = [
|
||||
request(app).post('/api/services').send({ id: 'new1', name: 'New 1' }),
|
||||
request(app).post('/api/services').send({ id: 'new2', name: 'New 2' }),
|
||||
request(app).delete('/api/services/base1'),
|
||||
request(app).post('/api/services').send({ id: 'new3', name: 'New 3' }),
|
||||
];
|
||||
|
||||
await Promise.all(operations);
|
||||
|
||||
// Verify final state is consistent
|
||||
const services = JSON.parse(fs.readFileSync(testServicesFile, 'utf8'));
|
||||
|
||||
// Should have base2 + 3 new services = 4 total
|
||||
expect(services.length).toBe(4);
|
||||
expect(services.find(s => s.id === 'base1')).toBeUndefined();
|
||||
expect(services.find(s => s.id === 'base2')).toBeDefined();
|
||||
expect(services.find(s => s.id === 'new1')).toBeDefined();
|
||||
expect(services.find(s => s.id === 'new2')).toBeDefined();
|
||||
expect(services.find(s => s.id === 'new3')).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Health Check Integration', () => {
|
||||
test('should verify API health before operations', async () => {
|
||||
// Check health
|
||||
const healthRes = await request(app).get('/api/health');
|
||||
expect(healthRes.statusCode).toBe(200);
|
||||
expect(healthRes.body.status).toBe('ok');
|
||||
|
||||
// Perform operation
|
||||
const addRes = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'test', name: 'Test' });
|
||||
|
||||
expect(addRes.statusCode).toBe(200);
|
||||
|
||||
// Check health again
|
||||
const healthRes2 = await request(app).get('/api/health');
|
||||
expect(healthRes2.statusCode).toBe(200);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Real-World Workflow Scenarios', () => {
|
||||
test('Scenario: User discovers and deploys multiple media apps', async () => {
|
||||
// Step 1: Browse templates
|
||||
const templatesRes = await request(app).get('/api/apps/templates');
|
||||
const templates = templatesRes.body.templates;
|
||||
|
||||
// Step 2: Find media apps
|
||||
const mediaApps = ['plex', 'jellyfin', 'emby'];
|
||||
const selectedApps = mediaApps.map(id => ({
|
||||
id,
|
||||
name: templates[id].name,
|
||||
logo: templates[id].logo,
|
||||
}));
|
||||
|
||||
// Step 3: Deploy all media apps
|
||||
for (const serviceConfig of selectedApps) {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send(serviceConfig);
|
||||
expect(res.statusCode).toBe(200);
|
||||
}
|
||||
|
||||
// Step 4: Verify all deployed
|
||||
const servicesRes = await request(app).get('/api/services');
|
||||
expect(servicesRes.body.length).toBe(3);
|
||||
|
||||
mediaApps.forEach(appId => {
|
||||
expect(servicesRes.body.find(s => s.id === appId)).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
test('Scenario: Admin configures system and imports existing services', async () => {
|
||||
// Step 1: Set system configuration
|
||||
const config = {
|
||||
domain: 'homelab.local',
|
||||
theme: 'dark',
|
||||
enableHealthCheck: true,
|
||||
};
|
||||
|
||||
await request(app).post('/api/config').send(config);
|
||||
|
||||
// Step 2: Import existing services from backup
|
||||
const existingServices = [
|
||||
{ id: 'router', name: 'Router', logo: '/assets/router.png' },
|
||||
{ id: 'nas', name: 'NAS', logo: '/assets/nas.png' },
|
||||
{ id: 'pihole', name: 'Pi-hole', logo: '/assets/pihole.png' },
|
||||
];
|
||||
|
||||
await request(app).put('/api/services').send(existingServices);
|
||||
|
||||
// Step 3: Add new service
|
||||
await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'newapp', name: 'New App' });
|
||||
|
||||
// Step 4: Verify all services
|
||||
const servicesRes = await request(app).get('/api/services');
|
||||
expect(servicesRes.body.length).toBe(4);
|
||||
|
||||
// Step 5: Verify config persisted
|
||||
const configRes = await request(app).get('/api/config');
|
||||
expect(configRes.body.domain).toBe('homelab.local');
|
||||
});
|
||||
|
||||
test('Scenario: User reorganizes services (delete old, add new)', async () => {
|
||||
// Step 1: Start with existing services
|
||||
const oldServices = [
|
||||
{ id: 'old1', name: 'Old Service 1' },
|
||||
{ id: 'old2', name: 'Old Service 2' },
|
||||
{ id: 'keep', name: 'Keep This' },
|
||||
];
|
||||
|
||||
await request(app).put('/api/services').send(oldServices);
|
||||
|
||||
// Step 2: Delete old services
|
||||
await request(app).delete('/api/services/old1');
|
||||
await request(app).delete('/api/services/old2');
|
||||
|
||||
// Step 3: Add new services
|
||||
await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'new1', name: 'New Service 1' });
|
||||
|
||||
await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'new2', name: 'New Service 2' });
|
||||
|
||||
// Step 4: Verify final state
|
||||
const servicesRes = await request(app).get('/api/services');
|
||||
expect(servicesRes.body.length).toBe(3);
|
||||
|
||||
const serviceIds = servicesRes.body.map(s => s.id);
|
||||
expect(serviceIds).toContain('keep');
|
||||
expect(serviceIds).toContain('new1');
|
||||
expect(serviceIds).toContain('new2');
|
||||
expect(serviceIds).not.toContain('old1');
|
||||
expect(serviceIds).not.toContain('old2');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Data Persistence and State Management', () => {
|
||||
test('should persist data across multiple operations', async () => {
|
||||
// Create initial state
|
||||
await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'persistent', name: 'Persistent Service' });
|
||||
|
||||
// Read file directly
|
||||
const services1 = JSON.parse(fs.readFileSync(testServicesFile, 'utf8'));
|
||||
expect(services1.length).toBe(1);
|
||||
|
||||
// Modify through API
|
||||
await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'another', name: 'Another Service' });
|
||||
|
||||
// Read file again
|
||||
const services2 = JSON.parse(fs.readFileSync(testServicesFile, 'utf8'));
|
||||
expect(services2.length).toBe(2);
|
||||
|
||||
// Verify through API
|
||||
const apiRes = await request(app).get('/api/services');
|
||||
expect(apiRes.body.length).toBe(2);
|
||||
|
||||
// All three methods should show same data
|
||||
expect(services2).toEqual(apiRes.body);
|
||||
});
|
||||
|
||||
test('should handle rapid sequential operations', async () => {
|
||||
// Perform 10 rapid operations (sequential, not parallel)
|
||||
for (let i = 0; i < 10; i++) {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: `rapid-${i}`, name: `Rapid ${i}` });
|
||||
|
||||
if (res.statusCode !== 200) {
|
||||
console.log(`Rapid operation ${i} failed:`, res.body);
|
||||
}
|
||||
expect(res.statusCode).toBe(200);
|
||||
}
|
||||
|
||||
// Verify all 10 services exist
|
||||
const services = JSON.parse(fs.readFileSync(testServicesFile, 'utf8'));
|
||||
expect(services.length).toBe(10);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,21 +0,0 @@
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
|
||||
// Use temp directory for all file-based operations during tests
|
||||
const tmpDir = path.join(os.tmpdir(), 'dashcaddy-tests');
|
||||
|
||||
// Prevent modules from touching production files
|
||||
process.env.ENCRYPTION_KEY_FILE = path.join(tmpDir, '.encryption-key');
|
||||
process.env.DASHCADDY_ENCRYPTION_KEY = 'a'.repeat(64); // 32 bytes in hex for test determinism
|
||||
|
||||
// Suppress console output during tests (set DEBUG_TESTS=1 to enable)
|
||||
if (!process.env.DEBUG_TESTS) {
|
||||
global.console = {
|
||||
...console,
|
||||
log: jest.fn(),
|
||||
warn: jest.fn(),
|
||||
error: jest.fn(),
|
||||
info: jest.fn(),
|
||||
debug: jest.fn(),
|
||||
};
|
||||
}
|
||||
@@ -1,164 +0,0 @@
|
||||
/**
|
||||
* Tests for logger-utils.js
|
||||
* Created: 2026-03-21
|
||||
*/
|
||||
|
||||
const { sanitizeForLog, redactCredential, safeLog, SENSITIVE_FIELDS } = require('../logger-utils');
|
||||
|
||||
describe('logger-utils', () => {
|
||||
describe('sanitizeForLog', () => {
|
||||
test('should redact sensitive field names', () => {
|
||||
const input = {
|
||||
username: 'admin',
|
||||
password: 'secret123',
|
||||
apiKey: 'abc-def-ghi',
|
||||
token: 'xyz123',
|
||||
};
|
||||
|
||||
const result = sanitizeForLog(input);
|
||||
|
||||
expect(result.username).toBe('admin');
|
||||
expect(result.password).toBe('[REDACTED]');
|
||||
expect(result.apiKey).toBe('[REDACTED]');
|
||||
expect(result.token).toBe('[REDACTED]');
|
||||
});
|
||||
|
||||
test('should handle nested objects', () => {
|
||||
const input = {
|
||||
user: {
|
||||
name: 'Alice',
|
||||
credentials: {
|
||||
password: 'secret',
|
||||
token: 'abc123',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const result = sanitizeForLog(input);
|
||||
|
||||
expect(result.user.name).toBe('Alice');
|
||||
expect(result.user.credentials.password).toBe('[REDACTED]');
|
||||
expect(result.user.credentials.token).toBe('[REDACTED]');
|
||||
});
|
||||
|
||||
test('should handle arrays', () => {
|
||||
const input = [
|
||||
{ name: 'user1', password: 'pass1' },
|
||||
{ name: 'user2', secret: 'pass2' },
|
||||
];
|
||||
|
||||
const result = sanitizeForLog(input);
|
||||
|
||||
expect(result[0].name).toBe('user1');
|
||||
expect(result[0].password).toBe('[REDACTED]');
|
||||
expect(result[1].name).toBe('user2');
|
||||
expect(result[1].secret).toBe('[REDACTED]');
|
||||
});
|
||||
|
||||
test('should handle null and undefined', () => {
|
||||
expect(sanitizeForLog(null)).toBeNull();
|
||||
expect(sanitizeForLog(undefined)).toBeUndefined();
|
||||
});
|
||||
|
||||
test('should support additional sensitive keys', () => {
|
||||
const input = {
|
||||
email: 'user@example.com',
|
||||
ssn: '123-45-6789',
|
||||
};
|
||||
|
||||
const result = sanitizeForLog(input, ['ssn']);
|
||||
|
||||
expect(result.email).toBe('user@example.com');
|
||||
expect(result.ssn).toBe('[REDACTED]');
|
||||
});
|
||||
|
||||
test('should be case-insensitive for field matching', () => {
|
||||
const input = {
|
||||
PASSWORD: 'secret',
|
||||
ApiKey: 'key123',
|
||||
Bearer_Token: 'token456',
|
||||
};
|
||||
|
||||
const result = sanitizeForLog(input);
|
||||
|
||||
expect(result.PASSWORD).toBe('[REDACTED]');
|
||||
expect(result.ApiKey).toBe('[REDACTED]');
|
||||
expect(result.Bearer_Token).toBe('[REDACTED]');
|
||||
});
|
||||
});
|
||||
|
||||
describe('redactCredential', () => {
|
||||
test('should show first and last 4 characters for long strings', () => {
|
||||
const input = 'abcdefghijklmnop';
|
||||
const result = redactCredential(input);
|
||||
|
||||
expect(result).toMatch(/^abcd.*mnop$/);
|
||||
expect(result).toContain('*');
|
||||
});
|
||||
|
||||
test('should fully redact short strings', () => {
|
||||
expect(redactCredential('short')).toBe('[REDACTED]');
|
||||
expect(redactCredential('12345678')).toBe('[REDACTED]');
|
||||
});
|
||||
|
||||
test('should handle null/undefined', () => {
|
||||
expect(redactCredential(null)).toBe('[REDACTED]');
|
||||
expect(redactCredential(undefined)).toBe('[REDACTED]');
|
||||
});
|
||||
|
||||
test('should handle non-string input', () => {
|
||||
expect(redactCredential(12345)).toBe('[REDACTED]');
|
||||
expect(redactCredential({})).toBe('[REDACTED]');
|
||||
});
|
||||
|
||||
test('should limit middle asterisks to 10', () => {
|
||||
const input = 'a'.repeat(100);
|
||||
const result = redactCredential(input);
|
||||
|
||||
const asteriskMatch = result.match(/\*/g);
|
||||
expect(asteriskMatch).toBeTruthy();
|
||||
expect(asteriskMatch.length).toBe(10);
|
||||
});
|
||||
});
|
||||
|
||||
describe('safeLog', () => {
|
||||
test('should create safe log object with message and sanitized data', () => {
|
||||
const result = safeLog('User login', {
|
||||
username: 'alice',
|
||||
password: 'secret123',
|
||||
});
|
||||
|
||||
expect(result).toHaveProperty('message', 'User login');
|
||||
expect(result).toHaveProperty('timestamp');
|
||||
expect(result.data.username).toBe('alice');
|
||||
expect(result.data.password).toBe('[REDACTED]');
|
||||
});
|
||||
|
||||
test('should include timestamp in ISO format', () => {
|
||||
const result = safeLog('Test message');
|
||||
|
||||
expect(result.timestamp).toMatch(/^\d{4}-\d{2}-\d{2}T/);
|
||||
});
|
||||
|
||||
test('should handle empty data', () => {
|
||||
const result = safeLog('Test message');
|
||||
|
||||
expect(result.message).toBe('Test message');
|
||||
expect(result.data).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('SENSITIVE_FIELDS constant', () => {
|
||||
test('should include common sensitive field names', () => {
|
||||
expect(SENSITIVE_FIELDS).toContain('password');
|
||||
expect(SENSITIVE_FIELDS).toContain('token');
|
||||
expect(SENSITIVE_FIELDS).toContain('secret');
|
||||
expect(SENSITIVE_FIELDS).toContain('apiKey');
|
||||
expect(SENSITIVE_FIELDS).toContain('privateKey');
|
||||
});
|
||||
|
||||
test('should have reasonable length', () => {
|
||||
expect(SENSITIVE_FIELDS.length).toBeGreaterThan(10);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,51 +0,0 @@
|
||||
/**
|
||||
* Container Log Route Tests
|
||||
*
|
||||
* Tests Docker container log listing and retrieval endpoints
|
||||
* Note: These tests run against the real Docker socket if available,
|
||||
* or will gracefully handle Docker being unavailable in CI.
|
||||
*/
|
||||
|
||||
const request = require('supertest');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
const testServicesFile = path.join(os.tmpdir(), `logs-services-${Date.now()}.json`);
|
||||
const testConfigFile = path.join(os.tmpdir(), `logs-config-${Date.now()}.json`);
|
||||
|
||||
process.env.SERVICES_FILE = testServicesFile;
|
||||
process.env.CONFIG_FILE = testConfigFile;
|
||||
process.env.ENABLE_HEALTH_CHECKER = 'false';
|
||||
process.env.NODE_ENV = 'test';
|
||||
|
||||
fs.writeFileSync(testServicesFile, '[]', 'utf8');
|
||||
fs.writeFileSync(testConfigFile, '{}', 'utf8');
|
||||
|
||||
const app = require('../server');
|
||||
|
||||
describe('Container Log Routes', () => {
|
||||
afterAll(() => {
|
||||
try { fs.unlinkSync(testServicesFile); } catch (e) { /* ignore */ }
|
||||
try { fs.unlinkSync(testConfigFile); } catch (e) { /* ignore */ }
|
||||
});
|
||||
|
||||
describe('GET /api/logs/containers', () => {
|
||||
test('should return 200 with containers array', async () => {
|
||||
const res = await request(app).get('/api/logs/containers');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(Array.isArray(res.body.containers)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/logs/container/:id', () => {
|
||||
test('should return 404 or 500 for nonexistent container', async () => {
|
||||
const res = await request(app).get('/api/logs/container/nonexistent');
|
||||
|
||||
// Docker will throw a not-found error for an invalid container ID
|
||||
expect([404, 500]).toContain(res.statusCode);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,98 +0,0 @@
|
||||
/**
|
||||
* Monitoring Route Tests
|
||||
*
|
||||
* Tests resource monitoring endpoints and legacy container stats endpoints.
|
||||
* Note: GET /api/stats/containers requires a live Docker connection, so in the
|
||||
* test environment it will return 500 (Docker unavailable). We assert both
|
||||
* the happy path (200) and the expected failure (500) to keep the test green
|
||||
* regardless of whether Docker is running.
|
||||
*/
|
||||
|
||||
const request = require('supertest');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
const testServicesFile = path.join(os.tmpdir(), `monitoring-services-${Date.now()}.json`);
|
||||
const testConfigFile = path.join(os.tmpdir(), `monitoring-config-${Date.now()}.json`);
|
||||
|
||||
process.env.SERVICES_FILE = testServicesFile;
|
||||
process.env.CONFIG_FILE = testConfigFile;
|
||||
process.env.ENABLE_HEALTH_CHECKER = 'false';
|
||||
process.env.NODE_ENV = 'test';
|
||||
|
||||
fs.writeFileSync(testServicesFile, '[]', 'utf8');
|
||||
fs.writeFileSync(testConfigFile, '{}', 'utf8');
|
||||
|
||||
const app = require('../server');
|
||||
|
||||
describe('Monitoring Routes', () => {
|
||||
afterAll(() => {
|
||||
try { fs.unlinkSync(testServicesFile); } catch (e) { /* ignore */ }
|
||||
try { fs.unlinkSync(testConfigFile); } catch (e) { /* ignore */ }
|
||||
});
|
||||
|
||||
describe('GET /api/monitoring/stats', () => {
|
||||
test('should return 200 with stats data', async () => {
|
||||
const res = await request(app).get('/api/monitoring/stats');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body).toHaveProperty('stats');
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/monitoring/stats/:containerId', () => {
|
||||
test('should return 404 for non-existent container', async () => {
|
||||
const res = await request(app).get('/api/monitoring/stats/nonexistent-container');
|
||||
|
||||
expect(res.statusCode).toBe(404);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/monitoring/history/:containerId', () => {
|
||||
test('should return 200 with history array for any container ID', async () => {
|
||||
const res = await request(app).get('/api/monitoring/history/some-container');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body).toHaveProperty('history');
|
||||
expect(res.body).toHaveProperty('hours');
|
||||
});
|
||||
|
||||
test('should accept hours query parameter', async () => {
|
||||
const res = await request(app)
|
||||
.get('/api/monitoring/history/some-container')
|
||||
.query({ hours: 6 });
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.hours).toBe(6);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/monitoring/alerts/:containerId', () => {
|
||||
test('should return 200 with alert config (empty for unknown container)', async () => {
|
||||
const res = await request(app).get('/api/monitoring/alerts/unknown-container');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body).toHaveProperty('config');
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/stats/containers', () => {
|
||||
test('should return 200 with containers array or 500 if Docker unavailable', async () => {
|
||||
const res = await request(app).get('/api/stats/containers');
|
||||
|
||||
// In test environment Docker may not be available
|
||||
expect([200, 500]).toContain(res.statusCode);
|
||||
|
||||
if (res.statusCode === 200) {
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body).toHaveProperty('stats');
|
||||
expect(Array.isArray(res.body.stats)).toBe(true);
|
||||
expect(res.body).toHaveProperty('timestamp');
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,181 +0,0 @@
|
||||
/**
|
||||
* Notification Route Tests
|
||||
*
|
||||
* Tests notification configuration, test delivery, and history endpoints.
|
||||
* Notifications are mounted at /api/notifications/ prefix.
|
||||
*/
|
||||
|
||||
const request = require('supertest');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
const testServicesFile = path.join(os.tmpdir(), `notifications-services-${Date.now()}.json`);
|
||||
const testConfigFile = path.join(os.tmpdir(), `notifications-config-${Date.now()}.json`);
|
||||
|
||||
process.env.SERVICES_FILE = testServicesFile;
|
||||
process.env.CONFIG_FILE = testConfigFile;
|
||||
process.env.ENABLE_HEALTH_CHECKER = 'false';
|
||||
process.env.NODE_ENV = 'test';
|
||||
|
||||
fs.writeFileSync(testServicesFile, '[]', 'utf8');
|
||||
fs.writeFileSync(testConfigFile, '{}', 'utf8');
|
||||
|
||||
const app = require('../server');
|
||||
|
||||
describe('Notification Routes', () => {
|
||||
afterAll(() => {
|
||||
try { fs.unlinkSync(testServicesFile); } catch (e) { /* ignore */ }
|
||||
try { fs.unlinkSync(testConfigFile); } catch (e) { /* ignore */ }
|
||||
});
|
||||
|
||||
describe('GET /api/notifications/config', () => {
|
||||
test('should return 200 with config object', async () => {
|
||||
const res = await request(app).get('/api/notifications/config');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body).toHaveProperty('config');
|
||||
expect(res.body.config).toHaveProperty('enabled');
|
||||
expect(res.body.config).toHaveProperty('providers');
|
||||
expect(res.body.config.providers).toHaveProperty('discord');
|
||||
expect(res.body.config.providers).toHaveProperty('telegram');
|
||||
expect(res.body.config.providers).toHaveProperty('ntfy');
|
||||
});
|
||||
|
||||
test('should redact sensitive provider data', async () => {
|
||||
const res = await request(app).get('/api/notifications/config');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
// Should show enabled/configured flags, not raw webhook URLs or tokens
|
||||
const discord = res.body.config.providers.discord;
|
||||
expect(discord).toHaveProperty('enabled');
|
||||
expect(discord).toHaveProperty('configured');
|
||||
expect(discord).not.toHaveProperty('webhookUrl');
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/notifications/config', () => {
|
||||
test('should return 200 when updating enabled state', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/notifications/config')
|
||||
.send({ enabled: true });
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body.message).toContain('updated');
|
||||
});
|
||||
|
||||
test('should return 200 when updating event settings', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/notifications/config')
|
||||
.send({
|
||||
events: {
|
||||
containerDown: true,
|
||||
containerUp: false,
|
||||
},
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
});
|
||||
|
||||
test('should reject invalid Discord webhook URL', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/notifications/config')
|
||||
.send({
|
||||
providers: {
|
||||
discord: {
|
||||
enabled: true,
|
||||
webhookUrl: 'not-a-valid-url',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
|
||||
test('should reject invalid ntfy topic', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/notifications/config')
|
||||
.send({
|
||||
providers: {
|
||||
ntfy: {
|
||||
enabled: true,
|
||||
topic: 'invalid topic with spaces!!!',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/notifications/test', () => {
|
||||
test('should handle test with unknown provider', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/notifications/test')
|
||||
.send({ provider: 'unknown_provider' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
|
||||
test('should handle test with no provider (tests all enabled)', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/notifications/test')
|
||||
.send({});
|
||||
|
||||
// When no providers are configured, should still return 200
|
||||
// with sent: true (but results array may be empty or have failures)
|
||||
expect([200, 400]).toContain(res.statusCode);
|
||||
if (res.statusCode === 200) {
|
||||
expect(res.body.success).toBe(true);
|
||||
}
|
||||
});
|
||||
|
||||
test('should handle discord test gracefully when not configured', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/notifications/test')
|
||||
.send({ provider: 'discord' });
|
||||
|
||||
// Discord test without a webhook URL configured will fail
|
||||
// but should still return 200 with success: false
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body).toHaveProperty('success');
|
||||
expect(res.body.provider).toBe('discord');
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/notifications/history', () => {
|
||||
test('should return 200 with history array', async () => {
|
||||
const res = await request(app).get('/api/notifications/history');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body).toHaveProperty('history');
|
||||
expect(Array.isArray(res.body.history)).toBe(true);
|
||||
expect(res.body).toHaveProperty('total');
|
||||
expect(typeof res.body.total).toBe('number');
|
||||
});
|
||||
|
||||
test('should respect limit query parameter', async () => {
|
||||
const res = await request(app)
|
||||
.get('/api/notifications/history')
|
||||
.query({ limit: 10 });
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body.history.length).toBeLessThanOrEqual(10);
|
||||
});
|
||||
});
|
||||
|
||||
describe('DELETE /api/notifications/history', () => {
|
||||
test('should clear notification history', async () => {
|
||||
const res = await request(app).delete('/api/notifications/history');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
expect(res.body.message).toContain('cleared');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,294 +0,0 @@
|
||||
// resource-monitor.js creates a Docker instance at module level.
|
||||
// On test machines without Docker, the constructor reads from non-existent files (returns defaults).
|
||||
|
||||
const resourceMonitor = require('../resource-monitor');
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset singleton state
|
||||
resourceMonitor.stats = new Map();
|
||||
resourceMonitor.alerts = new Map();
|
||||
resourceMonitor.lastAlerts = new Map();
|
||||
resourceMonitor.monitoring = false;
|
||||
if (resourceMonitor.monitoringInterval) {
|
||||
clearInterval(resourceMonitor.monitoringInterval);
|
||||
resourceMonitor.monitoringInterval = null;
|
||||
}
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
resourceMonitor.stop();
|
||||
});
|
||||
|
||||
// Helper: create a stat entry
|
||||
function makeStat(cpu = 10, memory = 50, timestamp = new Date().toISOString()) {
|
||||
return {
|
||||
timestamp,
|
||||
cpu: { percent: cpu, usage: cpu * 1000 },
|
||||
memory: { usage: memory * 1024 * 1024, limit: 1024 * 1024 * 1024, percent: memory, usageMB: memory, limitMB: 1024 },
|
||||
network: { rxBytes: 0, txBytes: 0, rxMB: 0, txMB: 0 },
|
||||
disk: { readBytes: 0, writeBytes: 0, readMB: 0, writeMB: 0 },
|
||||
pids: 5,
|
||||
};
|
||||
}
|
||||
|
||||
describe('recordStats', () => {
|
||||
test('creates new entry for unknown container', () => {
|
||||
resourceMonitor.recordStats('c1', '/my-app', makeStat());
|
||||
expect(resourceMonitor.stats.has('c1')).toBe(true);
|
||||
expect(resourceMonitor.stats.get('c1').history).toHaveLength(1);
|
||||
});
|
||||
|
||||
test('appends to existing history', () => {
|
||||
resourceMonitor.recordStats('c1', '/my-app', makeStat());
|
||||
resourceMonitor.recordStats('c1', '/my-app', makeStat());
|
||||
expect(resourceMonitor.stats.get('c1').history).toHaveLength(2);
|
||||
});
|
||||
|
||||
test('updates container name', () => {
|
||||
resourceMonitor.recordStats('c1', '/old-name', makeStat());
|
||||
resourceMonitor.recordStats('c1', '/new-name', makeStat());
|
||||
expect(resourceMonitor.stats.get('c1').name).toBe('/new-name');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getCurrentStats', () => {
|
||||
test('returns null for unknown container', () => {
|
||||
expect(resourceMonitor.getCurrentStats('nonexistent')).toBeNull();
|
||||
});
|
||||
|
||||
test('returns latest history entry', () => {
|
||||
const stat1 = makeStat(10);
|
||||
const stat2 = makeStat(50);
|
||||
resourceMonitor.recordStats('c1', '/app', stat1);
|
||||
resourceMonitor.recordStats('c1', '/app', stat2);
|
||||
expect(resourceMonitor.getCurrentStats('c1').cpu.percent).toBe(50);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getHistoricalStats', () => {
|
||||
test('returns empty array for unknown container', () => {
|
||||
expect(resourceMonitor.getHistoricalStats('nonexistent')).toEqual([]);
|
||||
});
|
||||
|
||||
test('filters by time window', () => {
|
||||
const recent = makeStat(10, 50, new Date().toISOString());
|
||||
const old = makeStat(10, 50, new Date(Date.now() - 48 * 60 * 60 * 1000).toISOString());
|
||||
resourceMonitor.stats.set('c1', { name: '/app', history: [old, recent] });
|
||||
const result = resourceMonitor.getHistoricalStats('c1', 24);
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0]).toBe(recent);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getAggregatedStats', () => {
|
||||
test('returns null for unknown container', () => {
|
||||
expect(resourceMonitor.getAggregatedStats('nonexistent')).toBeNull();
|
||||
});
|
||||
|
||||
test('returns null when no recent history', () => {
|
||||
const old = makeStat(10, 50, new Date(Date.now() - 48 * 60 * 60 * 1000).toISOString());
|
||||
resourceMonitor.stats.set('c1', { name: '/app', history: [old] });
|
||||
expect(resourceMonitor.getAggregatedStats('c1', 24)).toBeNull();
|
||||
});
|
||||
|
||||
test('calculates correct avg/min/max for CPU', () => {
|
||||
const now = new Date().toISOString();
|
||||
resourceMonitor.stats.set('c1', {
|
||||
name: '/app',
|
||||
history: [makeStat(10, 50, now), makeStat(30, 50, now), makeStat(20, 50, now)],
|
||||
});
|
||||
const agg = resourceMonitor.getAggregatedStats('c1', 24);
|
||||
expect(agg.cpu.avg).toBe(20);
|
||||
expect(agg.cpu.min).toBe(10);
|
||||
expect(agg.cpu.max).toBe(30);
|
||||
});
|
||||
|
||||
test('calculates correct avg/min/max for memory', () => {
|
||||
const now = new Date().toISOString();
|
||||
resourceMonitor.stats.set('c1', {
|
||||
name: '/app',
|
||||
history: [makeStat(10, 40, now), makeStat(10, 60, now), makeStat(10, 80, now)],
|
||||
});
|
||||
const agg = resourceMonitor.getAggregatedStats('c1', 24);
|
||||
expect(agg.memory.avg).toBe(60);
|
||||
expect(agg.memory.min).toBe(40);
|
||||
expect(agg.memory.max).toBe(80);
|
||||
});
|
||||
|
||||
test('includes dataPoints and timeRange', () => {
|
||||
const now = new Date().toISOString();
|
||||
resourceMonitor.stats.set('c1', { name: '/app', history: [makeStat(10, 50, now)] });
|
||||
const agg = resourceMonitor.getAggregatedStats('c1', 24);
|
||||
expect(agg.dataPoints).toBe(1);
|
||||
expect(agg.timeRange).toBe(24);
|
||||
});
|
||||
});
|
||||
|
||||
describe('checkAlerts', () => {
|
||||
test('does nothing when alert config is missing', () => {
|
||||
const handler = jest.fn();
|
||||
resourceMonitor.on('alert', handler);
|
||||
resourceMonitor.checkAlerts('c1', '/app', makeStat(99));
|
||||
expect(handler).not.toHaveBeenCalled();
|
||||
resourceMonitor.removeListener('alert', handler);
|
||||
});
|
||||
|
||||
test('does nothing when alerts are disabled', () => {
|
||||
resourceMonitor.alerts.set('c1', { enabled: false, cpuThreshold: 50 });
|
||||
const handler = jest.fn();
|
||||
resourceMonitor.on('alert', handler);
|
||||
resourceMonitor.checkAlerts('c1', '/app', makeStat(99));
|
||||
expect(handler).not.toHaveBeenCalled();
|
||||
resourceMonitor.removeListener('alert', handler);
|
||||
});
|
||||
|
||||
test('triggers CPU alert when threshold exceeded', () => {
|
||||
resourceMonitor.alerts.set('c1', { enabled: true, cpuThreshold: 50, cooldownMinutes: 0 });
|
||||
const handler = jest.fn();
|
||||
resourceMonitor.on('alert', handler);
|
||||
resourceMonitor.checkAlerts('c1', '/app', makeStat(75));
|
||||
expect(handler).toHaveBeenCalled();
|
||||
const alertData = handler.mock.calls[0][0];
|
||||
expect(alertData.alerts[0].type).toBe('cpu');
|
||||
resourceMonitor.removeListener('alert', handler);
|
||||
});
|
||||
|
||||
test('triggers memory alert when threshold exceeded', () => {
|
||||
resourceMonitor.alerts.set('c1', { enabled: true, memoryThreshold: 70, cooldownMinutes: 0 });
|
||||
const handler = jest.fn();
|
||||
resourceMonitor.on('alert', handler);
|
||||
resourceMonitor.checkAlerts('c1', '/app', makeStat(10, 80));
|
||||
expect(handler).toHaveBeenCalled();
|
||||
const alertData = handler.mock.calls[0][0];
|
||||
expect(alertData.alerts[0].type).toBe('memory');
|
||||
resourceMonitor.removeListener('alert', handler);
|
||||
});
|
||||
|
||||
test('respects cooldown period', () => {
|
||||
resourceMonitor.alerts.set('c1', { enabled: true, cpuThreshold: 50, cooldownMinutes: 15 });
|
||||
resourceMonitor.lastAlerts.set('c1', Date.now()); // Just alerted
|
||||
const handler = jest.fn();
|
||||
resourceMonitor.on('alert', handler);
|
||||
resourceMonitor.checkAlerts('c1', '/app', makeStat(99));
|
||||
expect(handler).not.toHaveBeenCalled();
|
||||
resourceMonitor.removeListener('alert', handler);
|
||||
});
|
||||
|
||||
test('does not trigger when below threshold', () => {
|
||||
resourceMonitor.alerts.set('c1', { enabled: true, cpuThreshold: 90, cooldownMinutes: 0 });
|
||||
const handler = jest.fn();
|
||||
resourceMonitor.on('alert', handler);
|
||||
resourceMonitor.checkAlerts('c1', '/app', makeStat(50));
|
||||
expect(handler).not.toHaveBeenCalled();
|
||||
resourceMonitor.removeListener('alert', handler);
|
||||
});
|
||||
});
|
||||
|
||||
describe('setAlertConfig / getAlertConfig / removeAlertConfig', () => {
|
||||
test('stores alert config', () => {
|
||||
resourceMonitor.setAlertConfig('c1', { cpuThreshold: 80 });
|
||||
expect(resourceMonitor.alerts.has('c1')).toBe(true);
|
||||
});
|
||||
|
||||
test('retrieves stored config', () => {
|
||||
resourceMonitor.setAlertConfig('c1', { cpuThreshold: 80 });
|
||||
const config = resourceMonitor.getAlertConfig('c1');
|
||||
expect(config.cpuThreshold).toBe(80);
|
||||
});
|
||||
|
||||
test('returns null for non-existent config', () => {
|
||||
expect(resourceMonitor.getAlertConfig('nonexistent')).toBeNull();
|
||||
});
|
||||
|
||||
test('removes config and last alert', () => {
|
||||
resourceMonitor.setAlertConfig('c1', { cpuThreshold: 80 });
|
||||
resourceMonitor.lastAlerts.set('c1', Date.now());
|
||||
resourceMonitor.removeAlertConfig('c1');
|
||||
expect(resourceMonitor.alerts.has('c1')).toBe(false);
|
||||
expect(resourceMonitor.lastAlerts.has('c1')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getAllStats', () => {
|
||||
test('returns empty object when no stats', () => {
|
||||
expect(resourceMonitor.getAllStats()).toEqual({});
|
||||
});
|
||||
|
||||
test('includes current and aggregated for each container', () => {
|
||||
const now = new Date().toISOString();
|
||||
resourceMonitor.stats.set('c1', { name: '/app', history: [makeStat(10, 50, now)] });
|
||||
const all = resourceMonitor.getAllStats();
|
||||
expect(all['c1']).toBeDefined();
|
||||
expect(all['c1'].name).toBe('/app');
|
||||
expect(all['c1'].current).toBeDefined();
|
||||
expect(all['c1'].aggregated).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('exportStats / importStats', () => {
|
||||
test('export returns object with stats and alerts', () => {
|
||||
const now = new Date().toISOString();
|
||||
resourceMonitor.stats.set('c1', { name: '/app', history: [makeStat(10, 50, now)] });
|
||||
resourceMonitor.alerts.set('c1', { enabled: true, cpuThreshold: 80 });
|
||||
const exported = resourceMonitor.exportStats();
|
||||
expect(exported.stats).toBeDefined();
|
||||
expect(exported.alerts).toBeDefined();
|
||||
expect(exported.exportedAt).toBeDefined();
|
||||
});
|
||||
|
||||
test('import restores stats from backup', () => {
|
||||
const backup = {
|
||||
stats: { 'c1': { name: '/app', history: [makeStat()] } },
|
||||
alerts: { 'c1': { enabled: true, cpuThreshold: 80 } },
|
||||
};
|
||||
resourceMonitor.importStats(backup);
|
||||
expect(resourceMonitor.stats.has('c1')).toBe(true);
|
||||
expect(resourceMonitor.alerts.has('c1')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('cleanupOldStats', () => {
|
||||
test('removes entries older than retention period', () => {
|
||||
const old = makeStat(10, 50, new Date(Date.now() - 200 * 60 * 60 * 1000).toISOString());
|
||||
const recent = makeStat(10, 50, new Date().toISOString());
|
||||
resourceMonitor.stats.set('c1', { name: '/app', history: [old, recent] });
|
||||
resourceMonitor.cleanupOldStats();
|
||||
expect(resourceMonitor.stats.get('c1').history).toHaveLength(1);
|
||||
});
|
||||
|
||||
test('deletes container entirely when no recent data', () => {
|
||||
const old = makeStat(10, 50, new Date(Date.now() - 200 * 60 * 60 * 1000).toISOString());
|
||||
resourceMonitor.stats.set('c1', { name: '/app', history: [old] });
|
||||
resourceMonitor.cleanupOldStats();
|
||||
expect(resourceMonitor.stats.has('c1')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('start / stop', () => {
|
||||
test('start sets monitoring flag', () => {
|
||||
jest.useFakeTimers();
|
||||
resourceMonitor.start();
|
||||
expect(resourceMonitor.monitoring).toBe(true);
|
||||
resourceMonitor.stop();
|
||||
jest.useRealTimers();
|
||||
});
|
||||
|
||||
test('stop clears interval', () => {
|
||||
jest.useFakeTimers();
|
||||
resourceMonitor.start();
|
||||
resourceMonitor.stop();
|
||||
expect(resourceMonitor.monitoring).toBe(false);
|
||||
expect(resourceMonitor.monitoringInterval).toBeNull();
|
||||
jest.useRealTimers();
|
||||
});
|
||||
|
||||
test('start is idempotent', () => {
|
||||
jest.useFakeTimers();
|
||||
resourceMonitor.start();
|
||||
const first = resourceMonitor.monitoringInterval;
|
||||
resourceMonitor.start();
|
||||
expect(resourceMonitor.monitoringInterval).toBe(first);
|
||||
resourceMonitor.stop();
|
||||
jest.useRealTimers();
|
||||
});
|
||||
});
|
||||
@@ -1,721 +0,0 @@
|
||||
/**
|
||||
* Security Regression Tests
|
||||
*
|
||||
* Tests for all 24 security fixes applied to DashCaddy.
|
||||
* These tests verify that previously-fixed vulnerabilities remain patched.
|
||||
* Grouped by the module/route they protect.
|
||||
*/
|
||||
|
||||
const request = require('supertest');
|
||||
const fs = require('fs');
|
||||
const fsp = require('fs').promises;
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
const crypto = require('crypto');
|
||||
|
||||
const tmpDir = path.join(os.tmpdir(), `security-tests-${Date.now()}`);
|
||||
fs.mkdirSync(tmpDir, { recursive: true });
|
||||
|
||||
const testServicesFile = path.join(tmpDir, 'services.json');
|
||||
const testConfigFile = path.join(tmpDir, 'config.json');
|
||||
const testCaddyfile = path.join(tmpDir, 'Caddyfile');
|
||||
const testCredentialsFile = path.join(tmpDir, 'credentials.json');
|
||||
const testTotpConfigFile = path.join(tmpDir, 'totp-config.json');
|
||||
const testErrorLogFile = path.join(tmpDir, 'error.log');
|
||||
|
||||
process.env.SERVICES_FILE = testServicesFile;
|
||||
process.env.CONFIG_FILE = testConfigFile;
|
||||
process.env.CADDYFILE_PATH = testCaddyfile;
|
||||
process.env.CREDENTIALS_FILE = testCredentialsFile;
|
||||
process.env.ENABLE_HEALTH_CHECKER = 'false';
|
||||
process.env.NODE_ENV = 'test';
|
||||
|
||||
fs.writeFileSync(testServicesFile, '[]', 'utf8');
|
||||
fs.writeFileSync(testConfigFile, '{}', 'utf8');
|
||||
fs.writeFileSync(testCaddyfile, '# Test Caddyfile\n', 'utf8');
|
||||
|
||||
const app = require('../server');
|
||||
|
||||
afterAll(() => {
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// CREDENTIAL MANAGER — Cache TTL
|
||||
// ============================================================
|
||||
describe('Credential Manager Cache TTL', () => {
|
||||
const CredentialManager = require('../credential-manager').constructor;
|
||||
|
||||
test('cache entries should have expiration timestamps', () => {
|
||||
const cm = new CredentialManager();
|
||||
cm.cache.set('test.key', { value: 'secret', exp: Date.now() + 300000 });
|
||||
const cached = cm.cache.get('test.key');
|
||||
expect(cached).toHaveProperty('exp');
|
||||
expect(cached.exp).toBeGreaterThan(Date.now());
|
||||
});
|
||||
|
||||
test('expired cache entries should not be returned by retrieve', async () => {
|
||||
const cm = new CredentialManager();
|
||||
// Set an expired entry
|
||||
cm.cache.set('expired.key', { value: 'old-secret', exp: Date.now() - 1000 });
|
||||
// retrieve() checks cache TTL — expired entry should be deleted
|
||||
// Since there's no file backing, it will return null
|
||||
const result = await cm.retrieve('expired.key');
|
||||
expect(result).toBeNull();
|
||||
expect(cm.cache.has('expired.key')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// CRYPTO UTILS — Key Rotation
|
||||
// ============================================================
|
||||
describe('Crypto Utils — Key Rotation', () => {
|
||||
const cryptoUtils = require('../crypto-utils');
|
||||
|
||||
test('rotateKey should be exported and callable', () => {
|
||||
// rotateKey writes to disk so just verify the function exists and signature
|
||||
expect(typeof cryptoUtils.rotateKey).toBe('function');
|
||||
});
|
||||
|
||||
test('decryptWithKey should decrypt with specified key', () => {
|
||||
const key = crypto.randomBytes(32);
|
||||
const iv = crypto.randomBytes(12);
|
||||
const cipher = crypto.createCipheriv('aes-256-gcm', key, iv);
|
||||
let encrypted = cipher.update('test-data', 'utf8', 'base64');
|
||||
encrypted += cipher.final('base64');
|
||||
const authTag = cipher.getAuthTag();
|
||||
const encStr = `${iv.toString('base64')}:${authTag.toString('base64')}:${encrypted}`;
|
||||
|
||||
const result = cryptoUtils.decryptWithKey(encStr, key);
|
||||
expect(result).toBe('test-data');
|
||||
});
|
||||
|
||||
test('decryptWithKey should reject invalid format', () => {
|
||||
const key = crypto.randomBytes(32);
|
||||
expect(() => cryptoUtils.decryptWithKey('invalid-no-colons', key)).toThrow('Invalid encrypted data format');
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// TOTP — Disable requires code verification
|
||||
// ============================================================
|
||||
describe('TOTP Disable Security', () => {
|
||||
test('POST /api/totp/disable should reject missing code when TOTP is active', async () => {
|
||||
// This tests that disabling TOTP requires a valid code
|
||||
// When TOTP is not set up, the endpoint just disables it
|
||||
// But when it IS set up, code is mandatory
|
||||
const res = await request(app)
|
||||
.post('/api/totp/disable')
|
||||
.send({});
|
||||
|
||||
// If TOTP isn't set up in test env, it will succeed (200)
|
||||
// The important thing is it doesn't crash
|
||||
expect([200, 400, 401]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('POST /api/totp/disable should reject non-6-digit code', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/totp/disable')
|
||||
.send({ code: 'abc' });
|
||||
|
||||
// If TOTP is active, should reject non-numeric codes
|
||||
expect([200, 400, 401]).toContain(res.statusCode);
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// SITES — Caddy reload error leak prevention
|
||||
// ============================================================
|
||||
describe('Sites Route Security', () => {
|
||||
test('POST /api/site should reject invalid domain format', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/site')
|
||||
.send({ domain: '<script>alert(1)</script>', upstream: 'localhost:8080' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
expect(res.body.error).toContain('DC-301');
|
||||
});
|
||||
|
||||
test('POST /api/site should reject invalid upstream format', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/site')
|
||||
.send({ domain: 'test.sami', upstream: 'not-valid' });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
expect(res.body.error).toContain('upstream');
|
||||
});
|
||||
|
||||
test('POST /api/site/external should reject URLs with Caddyfile injection chars', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/site/external')
|
||||
.send({
|
||||
subdomain: 'test',
|
||||
externalUrl: 'https://evil.com/path{inject}',
|
||||
});
|
||||
|
||||
// Should be rejected — either 400 (our validation) or 500 (URL constructor throws on {})
|
||||
expect([400, 500]).toContain(res.statusCode);
|
||||
// Must never succeed
|
||||
expect(res.statusCode).not.toBe(200);
|
||||
});
|
||||
|
||||
test('POST /api/site/external should reject URLs with newlines', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/site/external')
|
||||
.send({
|
||||
subdomain: 'test',
|
||||
externalUrl: 'https://evil.com/path\nreverse_proxy malicious:1234',
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
|
||||
test('POST /api/site/external should reject missing fields', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/site/external')
|
||||
.send({});
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
|
||||
test('POST /api/site/external should reject invalid subdomain', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/site/external')
|
||||
.send({
|
||||
subdomain: '../etc/passwd',
|
||||
externalUrl: 'https://example.com',
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// ERROR LOGS — No stack trace leak
|
||||
// ============================================================
|
||||
describe('Error Logs — No Stack Trace Leak', () => {
|
||||
beforeAll(async () => {
|
||||
// Write a fake error log with stack traces
|
||||
const logContent = [
|
||||
'[2026-03-07 12:00:00] server: Something failed',
|
||||
'Error: Internal failure',
|
||||
' at Object.<anonymous> (/app/server.js:123:45)',
|
||||
' at Module._compile (node:internal/modules/cjs/loader:1234:14)',
|
||||
'================================================================================',
|
||||
'[2026-03-07 12:01:00] dns: DNS timeout',
|
||||
'Error: connect ECONNREFUSED 192.168.1.1:5380',
|
||||
' at TCPConnectWrap.afterConnect [as oncomplete] (node:net:1234:16)',
|
||||
'================================================================================',
|
||||
].join('\n');
|
||||
// Write to the server's error log file location
|
||||
// The server uses ctx.ERROR_LOG_FILE — we need to check what that resolves to
|
||||
await fsp.writeFile(testErrorLogFile, logContent);
|
||||
});
|
||||
|
||||
test('GET /api/error-logs should not include details/stack traces', async () => {
|
||||
const res = await request(app).get('/api/error-logs');
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
|
||||
// If there are logs, verify none contain 'details' field
|
||||
if (res.body.logs.length > 0) {
|
||||
for (const log of res.body.logs) {
|
||||
expect(log).not.toHaveProperty('details');
|
||||
// Verify it has the safe fields
|
||||
if (log.timestamp) {
|
||||
expect(log).toHaveProperty('timestamp');
|
||||
expect(log).toHaveProperty('context');
|
||||
expect(log).toHaveProperty('error');
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// CONTAINERS — ID validation
|
||||
// ============================================================
|
||||
describe('Container ID Validation', () => {
|
||||
test('GET /api/containers/:id/check-update should 404 for nonexistent container', async () => {
|
||||
const res = await request(app).get('/api/containers/nonexistent123/check-update');
|
||||
|
||||
// Should return 404 (not found) not 500 (unhandled error)
|
||||
expect([404]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('POST /api/containers/:id/update should 404 for nonexistent container', async () => {
|
||||
const res = await request(app).post('/api/containers/nonexistent123/update');
|
||||
|
||||
expect([404]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('GET /api/logs/container/:id should 404 for nonexistent container', async () => {
|
||||
const res = await request(app).get('/api/logs/container/nonexistent123');
|
||||
|
||||
expect([404]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('GET /api/logs/stream/:id should 404 for nonexistent container', async () => {
|
||||
const res = await request(app).get('/api/logs/stream/nonexistent123');
|
||||
|
||||
expect([404]).toContain(res.statusCode);
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// LOG FILE — Path traversal prevention
|
||||
// ============================================================
|
||||
describe('Log File Path Traversal', () => {
|
||||
test('GET /api/logs/file should reject missing path', async () => {
|
||||
const res = await request(app).get('/api/logs/file');
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
|
||||
test('GET /api/logs/file should reject traversal paths', async () => {
|
||||
const res = await request(app)
|
||||
.get('/api/logs/file')
|
||||
.query({ path: '/etc/shadow' });
|
||||
|
||||
// Should be 403 (not allowed) or 404 (not found), never 200
|
||||
expect([403, 404]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('GET /api/logs/file should reject Windows system paths', async () => {
|
||||
const res = await request(app)
|
||||
.get('/api/logs/file')
|
||||
.query({ path: 'C:\\Windows\\System32\\config\\SAM' });
|
||||
|
||||
expect([403, 404]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('GET /api/logs/file should reject parent directory traversal', async () => {
|
||||
const res = await request(app)
|
||||
.get('/api/logs/file')
|
||||
.query({ path: '/var/log/../../etc/passwd' });
|
||||
|
||||
expect([403, 404]).toContain(res.statusCode);
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// BACKUP — No encryption key in export, TOTP re-auth for restore
|
||||
// ============================================================
|
||||
describe('Backup Security', () => {
|
||||
test('GET /api/backup/export should not include encryption key', async () => {
|
||||
const res = await request(app).get('/api/backup/export');
|
||||
|
||||
if (res.statusCode === 200 && res.body.backup) {
|
||||
const backup = res.body.backup;
|
||||
// Verify encryptionKey is NOT in the backup files
|
||||
expect(backup.files).not.toHaveProperty('encryptionKey');
|
||||
// Verify TOTP backup doesn't include manualKey
|
||||
if (backup.totp) {
|
||||
expect(backup.totp).not.toHaveProperty('manualKey');
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
test('POST /api/backup/restore should reject invalid backup format', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/backup/restore')
|
||||
.send({ backup: { invalid: true } });
|
||||
|
||||
expect(res.statusCode).toBe(400);
|
||||
});
|
||||
|
||||
test('POST /api/backup/restore should not restore encryptionKey even if provided', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/backup/restore')
|
||||
.send({
|
||||
backup: {
|
||||
version: '1.0',
|
||||
files: {
|
||||
encryptionKey: {
|
||||
type: 'text',
|
||||
content: 'malicious-key-data',
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// The encryptionKey should be skipped (not in fileMapping)
|
||||
if (res.statusCode === 200) {
|
||||
// If it succeeded, verify encryptionKey was skipped
|
||||
expect(res.body.results.restored).not.toContain('encryptionKey');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// SESSION COOKIE — Secure flag
|
||||
// ============================================================
|
||||
describe('Session Cookie Security', () => {
|
||||
test('session cookies should include Secure flag', async () => {
|
||||
// TOTP verify would set a session cookie on success
|
||||
// We can check the middleware by looking at any response that sets cookies
|
||||
const res = await request(app)
|
||||
.post('/api/totp/verify')
|
||||
.send({ code: '123456' });
|
||||
|
||||
// Even though verify fails, check cookie format if any cookies are set
|
||||
const cookies = res.headers['set-cookie'];
|
||||
if (cookies) {
|
||||
for (const cookie of Array.isArray(cookies) ? cookies : [cookies]) {
|
||||
if (cookie.includes('dashcaddy_session')) {
|
||||
expect(cookie.toLowerCase()).toContain('secure');
|
||||
expect(cookie.toLowerCase()).toContain('httponly');
|
||||
expect(cookie.toLowerCase()).toContain('samesite');
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// CUSTOM VOLUME — Host path validation
|
||||
// ============================================================
|
||||
describe('Custom Volume Path Validation', () => {
|
||||
// This tests the processTemplateVariables function indirectly
|
||||
// The helpers.js validates custom volume hostPath against allowed roots
|
||||
|
||||
test('should not allow arbitrary host paths in volume overrides', async () => {
|
||||
// Deploy endpoint would use processTemplateVariables
|
||||
// Sending a custom volume with a dangerous path
|
||||
const res = await request(app)
|
||||
.post('/api/apps/deploy')
|
||||
.send({
|
||||
appId: 'plex',
|
||||
subdomain: 'test-plex',
|
||||
ip: '192.168.1.100',
|
||||
port: '32400',
|
||||
customVolumes: [{
|
||||
containerPath: '/config',
|
||||
hostPath: '/etc/shadow',
|
||||
}],
|
||||
});
|
||||
|
||||
// The deploy will likely fail for other reasons (no Docker, etc.)
|
||||
// But if it reaches volume processing, the dangerous path should be rejected
|
||||
// The key check: it shouldn't return 200 with /etc/shadow mounted
|
||||
if (res.statusCode === 200) {
|
||||
// If somehow succeeded, verify the dangerous path wasn't used
|
||||
expect(JSON.stringify(res.body)).not.toContain('/etc/shadow');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// LOGO DELETE — Path traversal prevention
|
||||
// ============================================================
|
||||
describe('Logo Delete Path Traversal', () => {
|
||||
test('DELETE /api/logo should safely handle config with traversal paths', async () => {
|
||||
// Write config with a malicious logo path
|
||||
const configWithMaliciousLogo = {
|
||||
customLogo: '/assets/../../etc/passwd',
|
||||
customLogoDark: '/assets/../../../root/.ssh/id_rsa',
|
||||
};
|
||||
await fsp.writeFile(testConfigFile, JSON.stringify(configWithMaliciousLogo), 'utf8');
|
||||
|
||||
const res = await request(app).delete('/api/logo');
|
||||
|
||||
// Should succeed (reset branding) without deleting files outside assets dir
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body.success).toBe(true);
|
||||
|
||||
// Reset config for other tests
|
||||
await fsp.writeFile(testConfigFile, '{}', 'utf8');
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// DNS — SSRF prevention (server parameter validation)
|
||||
// ============================================================
|
||||
describe('DNS Server SSRF Prevention', () => {
|
||||
test('DELETE /api/dns/record should not succeed with arbitrary server IPs', async () => {
|
||||
const res = await request(app)
|
||||
.delete('/api/dns/record')
|
||||
.query({
|
||||
domain: 'test.sami',
|
||||
type: 'A',
|
||||
server: '169.254.169.254', // AWS metadata endpoint
|
||||
});
|
||||
|
||||
// Must never succeed — 400 (server rejected), 401 (no token), or 500 (dns not configured in test)
|
||||
expect(res.statusCode).not.toBe(200);
|
||||
});
|
||||
|
||||
test('POST /api/dns/record should not succeed with arbitrary server IPs', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/record')
|
||||
.send({
|
||||
domain: 'test.sami',
|
||||
ipAddress: '192.168.1.1',
|
||||
server: '10.0.0.1', // Not a configured DNS server
|
||||
});
|
||||
|
||||
expect(res.statusCode).not.toBe(200);
|
||||
});
|
||||
|
||||
test('GET /api/dns/resolve should not succeed with arbitrary server IPs', async () => {
|
||||
const res = await request(app)
|
||||
.get('/api/dns/resolve')
|
||||
.query({
|
||||
domain: 'test.sami',
|
||||
server: '127.0.0.1',
|
||||
});
|
||||
|
||||
expect(res.statusCode).not.toBe(200);
|
||||
});
|
||||
|
||||
test('GET /api/dns/logs should reject arbitrary server IPs', async () => {
|
||||
const res = await request(app)
|
||||
.get('/api/dns/logs')
|
||||
.query({ server: '192.168.1.1' });
|
||||
|
||||
expect([400]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('GET /api/dns/check-update should reject arbitrary server IPs', async () => {
|
||||
const res = await request(app)
|
||||
.get('/api/dns/check-update')
|
||||
.query({ server: '8.8.8.8' });
|
||||
|
||||
expect([400]).toContain(res.statusCode);
|
||||
});
|
||||
|
||||
test('POST /api/dns/update should reject arbitrary server IPs', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/update')
|
||||
.query({ server: '1.1.1.1' });
|
||||
|
||||
expect([400]).toContain(res.statusCode);
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// _httpFetch — Response size limit
|
||||
// ============================================================
|
||||
describe('HTTP Fetch Response Size Limit', () => {
|
||||
// This is tested indirectly — the _httpFetch function has a 10MB limit
|
||||
// We can verify the constant exists by checking the server module
|
||||
test('server should define MAX_RESPONSE_SIZE constant', () => {
|
||||
// Read server.js and verify the limit is defined
|
||||
const serverSource = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'server.js'), 'utf8',
|
||||
);
|
||||
expect(serverSource).toContain('MAX_RESPONSE_SIZE');
|
||||
expect(serverSource).toContain('10 * 1024 * 1024');
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// MIDDLEWARE — Session cookie format
|
||||
// ============================================================
|
||||
describe('Middleware Security', () => {
|
||||
test('middleware should set Secure flag on cookies', () => {
|
||||
const middlewareSource = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'middleware.js'), 'utf8',
|
||||
);
|
||||
// Verify the Set-Cookie string includes Secure
|
||||
expect(middlewareSource).toContain('; Secure;');
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// SAVECONFIG — Atomic operations
|
||||
// ============================================================
|
||||
describe('Config Save Atomicity', () => {
|
||||
test('saveConfig should use state manager for locking', () => {
|
||||
const serverSource = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'server.js'), 'utf8',
|
||||
);
|
||||
// Verify saveConfig uses configStateManager.update (not raw fs.writeFile)
|
||||
expect(serverSource).toContain('configStateManager.update');
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// SITES — External URL validation
|
||||
// ============================================================
|
||||
describe('External URL Security', () => {
|
||||
test('sites.js should validate URL components for unsafe chars', () => {
|
||||
const sitesSource = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'sites.js'), 'utf8',
|
||||
);
|
||||
// Verify the unsafe character regex exists
|
||||
expect(sitesSource).toContain('unsafeCaddyChars');
|
||||
expect(sitesSource).toMatch(/[{}\\n\\r]/);
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// CREDENTIAL MANAGER — Locking
|
||||
// ============================================================
|
||||
describe('Credential Manager File Locking', () => {
|
||||
test('credential-manager should use proper-lockfile', () => {
|
||||
const cmSource = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'credential-manager.js'), 'utf8',
|
||||
);
|
||||
expect(cmSource).toContain('proper-lockfile');
|
||||
expect(cmSource).toContain('_lockedUpdate');
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// TOTP CONFIG — No plaintext secret in file
|
||||
// ============================================================
|
||||
describe('TOTP Config File Security', () => {
|
||||
test('loadTotpConfig should delete secret from file data', () => {
|
||||
const serverSource = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'server.js'), 'utf8',
|
||||
);
|
||||
// Verify the secret deletion exists in loadTotpConfig
|
||||
expect(serverSource).toContain('delete loaded.secret');
|
||||
});
|
||||
|
||||
test('totp verify-setup should not write secret to config file', () => {
|
||||
const totpSource = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'auth', 'totp.js'), 'utf8',
|
||||
);
|
||||
// Verify totpConfig.secret assignment is NOT present
|
||||
expect(totpSource).not.toContain('totpConfig.secret = pendingSecret');
|
||||
expect(totpSource).not.toContain('totpConfig.secret =');
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// HELPERS — Volume path validation
|
||||
// ============================================================
|
||||
describe('Helpers — Volume Security', () => {
|
||||
test('helpers.js should validate hostPath against allowed roots', () => {
|
||||
const helpersSource = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'apps', 'helpers.js'), 'utf8',
|
||||
);
|
||||
expect(helpersSource).toContain('allowedRoots');
|
||||
expect(helpersSource).toContain('platformPaths.dockerData');
|
||||
expect(helpersSource).toContain('Custom volume host path rejected');
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// ERROR LOGS — No details field
|
||||
// ============================================================
|
||||
describe('Error Logs — Response Format', () => {
|
||||
test('errorlogs.js should not include details field', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'errorlogs.js'), 'utf8',
|
||||
);
|
||||
// The parsed log object should only have timestamp, context, error
|
||||
// NOT details (which contains stack traces)
|
||||
const returnBlock = source.match(/return \{[\s\S]*?\}/);
|
||||
if (returnBlock) {
|
||||
expect(returnBlock[0]).not.toContain('details');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// ASSETS — path.basename for logo deletion
|
||||
// ============================================================
|
||||
describe('Assets — Logo Path Safety', () => {
|
||||
test('assets.js should use path.basename for logo filename extraction', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'config', 'assets.js'), 'utf8',
|
||||
);
|
||||
expect(source).toContain('path.basename(logoPath)');
|
||||
// Should NOT use string replace for path extraction
|
||||
expect(source).not.toContain("logoPath.replace('/assets/', '')");
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// BACKUP — encryptionKey excluded
|
||||
// ============================================================
|
||||
describe('Backup — Encryption Key Exclusion', () => {
|
||||
test('backup.js should not include encryptionKey in filesToBackup', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'config', 'backup.js'), 'utf8',
|
||||
);
|
||||
// Should have a comment about deliberate exclusion
|
||||
expect(source).toContain('encryptionKey deliberately excluded');
|
||||
// Should NOT have encryptionKey as a key in filesToBackup array
|
||||
expect(source).not.toMatch(/\{\s*key:\s*'encryptionKey'/);
|
||||
});
|
||||
|
||||
test('backup.js restore fileMapping should not include encryptionKey', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'config', 'backup.js'), 'utf8',
|
||||
);
|
||||
// The RESTORE route's fileMapping (after "encryptionKey excluded" comment) must not have it
|
||||
// The preview route's fileMapping is allowed to have it (informational only)
|
||||
const restoreSection = source.substring(source.indexOf('encryptionKey excluded'));
|
||||
const restoreMapping = restoreSection.match(/const fileMapping = \{[\s\S]*?\};/);
|
||||
if (restoreMapping) {
|
||||
expect(restoreMapping[0]).not.toContain('encryptionKey:');
|
||||
}
|
||||
});
|
||||
|
||||
test('backup.js should require TOTP for sensitive restores', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'config', 'backup.js'), 'utf8',
|
||||
);
|
||||
expect(source).toContain('sensitiveKeys');
|
||||
expect(source).toContain('totpCode');
|
||||
expect(source).toContain('TOTP code required');
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// DNS — validateDnsServer function
|
||||
// ============================================================
|
||||
describe('DNS — Server Validation Function', () => {
|
||||
test('dns.js should define validateDnsServer', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'dns.js'), 'utf8',
|
||||
);
|
||||
expect(source).toContain('function validateDnsServer');
|
||||
expect(source).toContain('configuredIps');
|
||||
expect(source).toContain('validatorLib.isIP');
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// CONTAINERS — getVerifiedContainer usage
|
||||
// ============================================================
|
||||
describe('Containers — Verified Container Access', () => {
|
||||
test('containers.js update route should use getVerifiedContainer', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'containers.js'), 'utf8',
|
||||
);
|
||||
// update and check-update should both use getVerifiedContainer
|
||||
const updateSection = source.substring(source.indexOf("'/:id/update'"));
|
||||
expect(updateSection).toContain('getVerifiedContainer');
|
||||
|
||||
const checkUpdateSection = source.substring(source.indexOf("'/:id/check-update'"));
|
||||
expect(checkUpdateSection).toContain('getVerifiedContainer');
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// LOGS — Symlink resolution
|
||||
// ============================================================
|
||||
describe('Logs — Symlink Resolution', () => {
|
||||
test('logs.js should use realpath for symlink resolution', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'logs.js'), 'utf8',
|
||||
);
|
||||
expect(source).toContain('fsp.realpath');
|
||||
expect(source).toContain('path.sep');
|
||||
});
|
||||
|
||||
test('logs.js container routes should verify container exists', () => {
|
||||
const source = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'routes', 'logs.js'), 'utf8',
|
||||
);
|
||||
// Both container/:id and stream/:id should have inspect + NotFoundError
|
||||
expect(source).toContain('container.inspect()');
|
||||
expect(source).toContain('NotFoundError');
|
||||
});
|
||||
});
|
||||
@@ -1,417 +0,0 @@
|
||||
/**
|
||||
* Integration tests for server.js input validation
|
||||
* Tests that routes properly reject invalid input before reaching business logic
|
||||
*/
|
||||
|
||||
const request = require('supertest');
|
||||
const app = require('../server');
|
||||
|
||||
describe('POST /api/assets/upload - directory traversal prevention', () => {
|
||||
test('rejects filename with path separators', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/assets/upload')
|
||||
.send({ filename: '../../../etc/passwd', data: 'data:image/png;base64,iVBOR' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/path separator/i);
|
||||
});
|
||||
|
||||
test('rejects filename with backslash', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/assets/upload')
|
||||
.send({ filename: '..\\..\\config.json', data: 'data:image/png;base64,iVBOR' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/path separator/i);
|
||||
});
|
||||
|
||||
test('rejects filename with dot-dot', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/assets/upload')
|
||||
.send({ filename: '..evil.png', data: 'data:image/png;base64,iVBOR' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/path separator/i);
|
||||
});
|
||||
|
||||
test('rejects missing fields', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/assets/upload')
|
||||
.send({ filename: 'test.png' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/required/i);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/site - Caddyfile injection prevention', () => {
|
||||
test('rejects invalid domain format', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/site')
|
||||
.send({ domain: 'evil;rm -rf /', upstream: '127.0.0.1:8080' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid domain/i);
|
||||
});
|
||||
|
||||
test('rejects domain with spaces', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/site')
|
||||
.send({ domain: 'evil domain', upstream: '127.0.0.1:8080' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid domain/i);
|
||||
});
|
||||
|
||||
test('rejects invalid upstream format', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/site')
|
||||
.send({ domain: 'test.sami', upstream: 'not a valid upstream' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid upstream/i);
|
||||
});
|
||||
|
||||
test('rejects missing fields', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/site')
|
||||
.send({ domain: 'test.sami' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/required/i);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/site/external - URL and subdomain validation', () => {
|
||||
test('rejects invalid subdomain', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/site/external')
|
||||
.send({ subdomain: '-invalid', externalUrl: 'https://example.com' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid subdomain/i);
|
||||
});
|
||||
|
||||
test('rejects subdomain with special chars', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/site/external')
|
||||
.send({ subdomain: 'test;evil', externalUrl: 'https://example.com' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid subdomain/i);
|
||||
});
|
||||
|
||||
test('rejects invalid URL', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/site/external')
|
||||
.send({ subdomain: 'myapp', externalUrl: 'not-a-url' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.success).toBe(false);
|
||||
});
|
||||
|
||||
test('rejects missing fields', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/site/external')
|
||||
.send({ subdomain: 'myapp' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/required/i);
|
||||
});
|
||||
});
|
||||
|
||||
// DNS routes require a token to bypass the 401 token check and reach validation
|
||||
const FAKE_TOKEN = 'aaaa1111bbbb2222cccc3333dddd4444';
|
||||
|
||||
describe('POST /api/dns/record - DNS injection prevention', () => {
|
||||
test('rejects invalid domain format', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/record')
|
||||
.send({ domain: 'evil;command', ip: '10.0.0.1', token: FAKE_TOKEN });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid domain/i);
|
||||
});
|
||||
|
||||
test('rejects invalid IP address', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/record')
|
||||
.send({ domain: 'test.sami', ip: 'not-an-ip', token: FAKE_TOKEN });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid ip/i);
|
||||
});
|
||||
|
||||
test('rejects TTL out of range (too low)', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/record')
|
||||
.send({ domain: 'test.sami', ip: '10.0.0.1', ttl: 5, token: FAKE_TOKEN });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/ttl/i);
|
||||
});
|
||||
|
||||
test('rejects TTL out of range (too high)', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/record')
|
||||
.send({ domain: 'test.sami', ip: '10.0.0.1', ttl: 100000, token: FAKE_TOKEN });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/ttl/i);
|
||||
});
|
||||
|
||||
test('rejects invalid server IP', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/record')
|
||||
.send({ domain: 'test.sami', ip: '10.0.0.1', server: 'not-an-ip', token: FAKE_TOKEN });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid dns server/i);
|
||||
});
|
||||
|
||||
test('rejects missing fields', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/record')
|
||||
.send({ domain: 'test.sami', token: FAKE_TOKEN });
|
||||
expect(res.status).toBe(400);
|
||||
});
|
||||
});
|
||||
|
||||
describe('DELETE /api/dns/record - DNS injection prevention', () => {
|
||||
test('rejects invalid domain', async () => {
|
||||
const res = await request(app)
|
||||
.delete('/api/dns/record')
|
||||
.query({ domain: 'evil;drop table', token: 'abc123def456' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid domain/i);
|
||||
});
|
||||
|
||||
test('rejects invalid record type', async () => {
|
||||
const res = await request(app)
|
||||
.delete('/api/dns/record')
|
||||
.query({ domain: 'test.sami', type: 'INVALID', token: 'abc123def456' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid dns record type/i);
|
||||
});
|
||||
|
||||
test('rejects invalid ipAddress', async () => {
|
||||
const res = await request(app)
|
||||
.delete('/api/dns/record')
|
||||
.query({ domain: 'test.sami', ipAddress: 'not-ip', token: 'abc123def456' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid ip/i);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/dns/resolve - DNS injection prevention', () => {
|
||||
test('rejects invalid domain', async () => {
|
||||
const res = await request(app)
|
||||
.get('/api/dns/resolve')
|
||||
.query({ domain: 'evil;command', token: FAKE_TOKEN });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid domain/i);
|
||||
});
|
||||
|
||||
test('rejects invalid server IP', async () => {
|
||||
const res = await request(app)
|
||||
.get('/api/dns/resolve')
|
||||
.query({ domain: 'test.sami', server: 'not-an-ip', token: FAKE_TOKEN });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid dns server/i);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/apps/deploy - deployment validation', () => {
|
||||
test('rejects invalid subdomain', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/apps/deploy')
|
||||
.send({ appId: 'plex', config: { subdomain: '-bad-sub' } });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid subdomain/i);
|
||||
});
|
||||
|
||||
test('rejects invalid port', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/apps/deploy')
|
||||
.send({ appId: 'plex', config: { subdomain: 'test', port: 99999 } });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid port/i);
|
||||
});
|
||||
|
||||
test('rejects invalid IP', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/apps/deploy')
|
||||
.send({ appId: 'plex', config: { subdomain: 'test', ip: 'not-ip' } });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid ip/i);
|
||||
});
|
||||
|
||||
test('rejects unknown template', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/apps/deploy')
|
||||
.send({ appId: 'nonexistent-app-xyz', config: { subdomain: 'test' } });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid app template/i);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/dns/credentials - credential validation', () => {
|
||||
test('rejects missing fields', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/credentials')
|
||||
.send({ username: 'admin' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/required/i);
|
||||
});
|
||||
|
||||
test('rejects username exceeding max length', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/credentials')
|
||||
.send({ username: 'a'.repeat(101), password: 'secret' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/maximum length/i);
|
||||
});
|
||||
|
||||
test('rejects username with injection chars', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/credentials')
|
||||
.send({ username: 'admin;rm -rf /', password: 'secret' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid characters/i);
|
||||
});
|
||||
|
||||
test('rejects username with pipe', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/credentials')
|
||||
.send({ username: 'admin|evil', password: 'secret' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid characters/i);
|
||||
});
|
||||
|
||||
test('rejects invalid server IP', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/dns/credentials')
|
||||
.send({ username: 'admin', password: 'secret', server: 'not-ip' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid dns server/i);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/services - service config validation', () => {
|
||||
test('rejects missing fields', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'test' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/required/i);
|
||||
});
|
||||
|
||||
test('rejects invalid service id format', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services')
|
||||
.send({ id: 'invalid id with spaces!', name: 'Test' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.success).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('PUT /api/services - bulk import validation', () => {
|
||||
test('rejects non-array body', async () => {
|
||||
const res = await request(app)
|
||||
.put('/api/services')
|
||||
.send({ id: 'test', name: 'Test' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/array/i);
|
||||
});
|
||||
|
||||
test('rejects service with invalid id', async () => {
|
||||
const res = await request(app)
|
||||
.put('/api/services')
|
||||
.send([{ id: 'invalid id!', name: 'Test' }]);
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.success).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/services/update - service update validation', () => {
|
||||
test('rejects missing subdomains', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services/update')
|
||||
.send({ oldSubdomain: 'test' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/required/i);
|
||||
});
|
||||
|
||||
test('rejects invalid subdomain format', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services/update')
|
||||
.send({ oldSubdomain: '-bad', newSubdomain: 'good' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid subdomain/i);
|
||||
});
|
||||
|
||||
test('rejects invalid port', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services/update')
|
||||
.send({ oldSubdomain: 'old', newSubdomain: 'new', port: 70000 });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid port/i);
|
||||
});
|
||||
|
||||
test('rejects invalid IP', async () => {
|
||||
const res = await request(app)
|
||||
.post('/api/services/update')
|
||||
.send({ oldSubdomain: 'old', newSubdomain: 'new', ip: 'not-ip' });
|
||||
expect(res.status).toBe(400);
|
||||
expect(res.body.error).toMatch(/invalid ip/i);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/arr/test-connection - SSRF prevention', () => {
  const testConnection = (payload) =>
    request(app).post('/api/arr/test-connection').send(payload);

  test('rejects invalid URL', async () => {
    // A non-parseable URL must never reach the outbound request stage.
    const res = await testConnection({ service: 'radarr', url: 'not-a-url', apiKey: 'abc123def456' });

    expect(res.status).toBe(400);
    expect(res.body.success).toBe(false);
  });

  test('rejects invalid API key format', async () => {
    // ';' is outside the allowed API-key character set.
    const res = await testConnection({ service: 'radarr', url: 'http://localhost:7878', apiKey: 'a;b' });

    expect(res.status).toBe(400);
    expect(res.body.success).toBe(false);
  });
});
|
||||
|
||||
describe('POST /api/notifications/config - notification provider validation', () => {
  const postConfig = (payload) =>
    request(app).post('/api/notifications/config').send(payload);

  test('rejects invalid Discord webhook URL', async () => {
    const res = await postConfig({ providers: { discord: { webhookUrl: 'not-a-url' } } });

    expect(res.status).toBe(400);
    expect(res.body.error).toMatch(/discord webhook/i);
  });

  test('rejects invalid ntfy server URL', async () => {
    // Non-HTTP(S) schemes are refused for the ntfy server.
    const res = await postConfig({ providers: { ntfy: { serverUrl: 'ftp://bad' } } });

    expect(res.status).toBe(400);
    expect(res.body.error).toMatch(/ntfy server/i);
  });

  test('rejects invalid ntfy topic', async () => {
    const res = await postConfig({ providers: { ntfy: { topic: 'has spaces and $pecial!' } } });

    expect(res.status).toBe(400);
    expect(res.body.error).toMatch(/ntfy topic/i);
  });

  test('accepts valid config', async () => {
    // Minimal valid body: toggling the feature on with no providers.
    const res = await postConfig({ enabled: true });

    expect(res.status).toBe(200);
    expect(res.body.success).toBe(true);
  });
});
|
||||
|
||||
describe('Rate limiting headers', () => {
  test('returns rate limit headers on API responses', async () => {
    // The health endpoint is exempted from the rate limiter; it should still respond.
    const res = await request(app).get('/api/health');

    expect(res.status).toBe(200);
  });

  test('general API endpoint has rate limiting configured', async () => {
    // Rate limiting is skipped in the test env, so only reachability is asserted.
    const res = await request(app).get('/api/services');

    expect(res.status).toBe(200);
  });
});
|
||||
@@ -1,104 +0,0 @@
|
||||
/**
 * Sites Route Tests
 *
 * Tests Caddyfile management, site configuration, and external site endpoints.
 */

const request = require('supertest');
const fs = require('fs');
const path = require('path');
const os = require('os');

// Unique per-run temp files so repeated or parallel runs never collide.
const runId = Date.now();
const testServicesFile = path.join(os.tmpdir(), `sites-services-${runId}.json`);
const testConfigFile = path.join(os.tmpdir(), `sites-config-${runId}.json`);
const testCaddyfile = path.join(os.tmpdir(), `sites-Caddyfile-${runId}`);

// Point the server at the sandbox BEFORE it is required.
process.env.SERVICES_FILE = testServicesFile;
process.env.CONFIG_FILE = testConfigFile;
process.env.CADDYFILE_PATH = testCaddyfile;
process.env.ENABLE_HEALTH_CHECKER = 'false';
process.env.NODE_ENV = 'test';

// Seed the state files the server reads at require time.
fs.writeFileSync(testServicesFile, '[]', 'utf8');
fs.writeFileSync(testConfigFile, '{}', 'utf8');
fs.writeFileSync(testCaddyfile, '# Test Caddyfile', 'utf8');

const app = require('../server');
|
||||
|
||||
describe('Sites Routes', () => {
  afterAll(() => {
    // Best-effort removal of the temp sandbox files.
    [testServicesFile, testConfigFile, testCaddyfile].forEach((f) => {
      try { fs.unlinkSync(f); } catch (e) { /* ignore */ }
    });
  });

  describe('GET /api/caddyfile', () => {
    test('should return Caddyfile contents', async () => {
      const res = await request(app).get('/api/caddyfile');

      expect(res.statusCode).toBe(200);
      expect(res.body.success).toBe(true);
      // Content should come from the seeded temp Caddyfile, not the host's.
      expect(res.body.content).toContain('Test Caddyfile');
    });
  });

  describe('GET /api/apps/templates', () => {
    test('should return all templates with categories', async () => {
      const res = await request(app).get('/api/apps/templates');

      expect(res.statusCode).toBe(200);
      expect(res.body).toHaveProperty('templates');
      expect(res.body).toHaveProperty('categories');
      // The catalogue ships with well over 50 app templates.
      expect(Object.keys(res.body.templates).length).toBeGreaterThan(50);
    });
  });

  describe('GET /api/apps/templates/:appId', () => {
    test('should return specific template', async () => {
      const res = await request(app).get('/api/apps/templates/plex');

      expect(res.statusCode).toBe(200);
      expect(res.body.success).toBe(true);
      expect(res.body.template.name).toBe('Plex');
      expect(res.body.template.docker).toBeDefined();
    });

    test('should return 404 for unknown template', async () => {
      const res = await request(app).get('/api/apps/templates/nonexistent');

      expect(res.statusCode).toBe(404);
    });
  });

  describe('POST /api/site/external', () => {
    const createExternal = (payload) =>
      request(app).post('/api/site/external').send(payload);

    test('should reject missing required fields', async () => {
      const res = await createExternal({});

      expect(res.statusCode).toBe(400);
    });

    test('should reject invalid subdomain', async () => {
      const res = await createExternal({
        subdomain: 'INVALID SUBDOMAIN!',
        targetUrl: 'https://example.com',
        name: 'Test',
      });

      expect(res.statusCode).toBe(400);
    });
  });

  describe('GET /api/caddy/cas', () => {
    test('should return CA list from Caddyfile', async () => {
      const res = await request(app).get('/api/caddy/cas');

      expect(res.statusCode).toBe(200);
      expect(res.body.status).toBe('success');
      expect(Array.isArray(res.body.data.cas)).toBe(true);
    });
  });
});
|
||||
@@ -1,249 +0,0 @@
|
||||
/**
 * State Manager Tests
 *
 * Tests the thread-safe state management with file locking.
 */

const StateManager = require('../state-manager');
const fs = require('fs').promises;
const path = require('path');
const os = require('os');

// A dedicated temp subdirectory avoids cross-test file collisions.
const testDir = path.join(os.tmpdir(), `state-manager-test-${Date.now()}`);
const testFile = path.join(testDir, 'test-state.json');
|
||||
|
||||
describe('StateManager', () => {
  let stateManager;

  // Remove the state file and any leftover lockfile; shared by before/after hooks.
  const purgeTestFiles = async () => {
    for (const target of [testFile, `${testFile}.lock`]) {
      try { await fs.unlink(target); } catch (e) { /* ignore */ }
    }
  };

  beforeAll(async () => {
    await fs.mkdir(testDir, { recursive: true });
  });

  beforeEach(async () => {
    await purgeTestFiles();

    stateManager = new StateManager(testFile, {
      lockRetries: 20,
      lockRetryInterval: 50,
      lockTimeout: 15000,
    });
  });

  afterEach(async () => {
    await purgeTestFiles();
  });

  afterAll(async () => {
    try { await fs.rm(testDir, { recursive: true }); } catch (e) { /* ignore */ }
  });

  describe('Basic Operations', () => {
    test('creates file with empty array if not exists', async () => {
      const data = await stateManager.read();

      expect(Array.isArray(data)).toBe(true);
      expect(data.length).toBe(0);
    });

    test('write and read roundtrip', async () => {
      const seeded = [
        { id: '1', name: 'Test Service 1' },
        { id: '2', name: 'Test Service 2' },
      ];

      await stateManager.write(seeded);
      const data = await stateManager.read();

      expect(data).toEqual(seeded);
    });

    test('update with callback function', async () => {
      await stateManager.write([{ id: '1', name: 'Service 1' }]);

      const result = await stateManager.update((items) => {
        items.push({ id: '2', name: 'Service 2' });
        return items;
      });

      expect(result.length).toBe(2);
      expect(result[1].name).toBe('Service 2');
    });
  });

  describe('Convenience Methods', () => {
    test('addItem adds to array', async () => {
      await stateManager.addItem({ id: '1', name: 'Service 1' });
      await stateManager.addItem({ id: '2', name: 'Service 2' });

      const items = await stateManager.read();
      expect(items.length).toBe(2);
    });

    test('removeItem removes by ID', async () => {
      await stateManager.write([
        { id: '1', name: 'Service 1' },
        { id: '2', name: 'Service 2' },
        { id: '3', name: 'Service 3' },
      ]);

      await stateManager.removeItem('2');

      const remaining = await stateManager.read();
      expect(remaining.length).toBe(2);
      expect(remaining.find((i) => i.id === '2')).toBeUndefined();
    });

    test('updateItem updates by ID', async () => {
      await stateManager.write([
        { id: '1', name: 'Service 1', status: 'offline' },
      ]);

      await stateManager.updateItem('1', { status: 'online' });

      const item = await stateManager.findItem('1');
      expect(item.status).toBe('online');
      expect(item.name).toBe('Service 1'); // untouched field survives the patch
    });

    test('findItem returns null for non-existent ID', async () => {
      await stateManager.write([{ id: '1', name: 'Service 1' }]);

      const item = await stateManager.findItem('999');
      expect(item).toBeNull();
    });
  });

  describe('Concurrent Access', () => {
    test('concurrent writes do not corrupt data', async () => {
      await stateManager.write([]);

      // Fire 10 updates at once; file locking should serialize them.
      const writers = Array.from({ length: 10 }, (_, i) =>
        stateManager.update((items) => {
          items.push({ id: `service-${i}`, name: `Service ${i}` });
          return items;
        }),
      );
      await Promise.all(writers);

      const items = await stateManager.read();
      expect(items.length).toBe(10);

      // The file on disk must still be parseable JSON (not corrupted).
      const raw = await fs.readFile(testFile, 'utf8');
      expect(() => JSON.parse(raw)).not.toThrow();
    });

    test('concurrent reads while writing', async () => {
      await stateManager.write([{ id: '1', name: 'Initial' }]);

      const slowWrite = stateManager.update(async (items) => {
        // Simulate slow operation while readers pile up.
        await new Promise((resolve) => setTimeout(resolve, 100));
        items.push({ id: '2', name: 'New' });
        return items;
      });

      const readers = Array.from({ length: 5 }, () => stateManager.read());
      await Promise.all([slowWrite, ...readers]);

      // Should complete without errors and reflect the write.
      const final = await stateManager.read();
      expect(final.length).toBe(2);
    });
  });

  describe('Error Handling', () => {
    test('throws error on invalid JSON', async () => {
      // Corrupt the file directly, bypassing the manager.
      await fs.writeFile(testFile, '{invalid json', 'utf8');

      await expect(stateManager.read()).rejects.toThrow();
    });

    test('handles missing file gracefully', async () => {
      await fs.unlink(testFile);

      const data = await stateManager.read();
      expect(Array.isArray(data)).toBe(true);
    });

    test('update callback errors are caught', async () => {
      await expect(
        stateManager.update(() => {
          throw new Error('Test error');
        }),
      ).rejects.toThrow('Test error');
    });
  });

  describe('Lock Management', () => {
    test('isLocked detects locked state', async () => {
      const lockfile = require('proper-lockfile');

      // Manually hold the lock and confirm the manager sees it.
      const release = await lockfile.lock(testFile);
      expect(await stateManager.isLocked()).toBe(true);

      await release();
      expect(await stateManager.isLocked()).toBe(false);
    });

    test('forceUnlock removes stuck lock', async () => {
      const lockfile = require('proper-lockfile');

      // Take a lock and deliberately never release it.
      await lockfile.lock(testFile);

      await stateManager.forceUnlock();

      // Writing should now succeed.
      await expect(stateManager.write([])).resolves.not.toThrow();
    });
  });

  describe('Performance', () => {
    test('handles large datasets efficiently', async () => {
      const dataset = Array.from({ length: 1000 }, (_, i) => ({
        id: `service-${i}`,
        name: `Service ${i}`,
        url: `https://service-${i}.example.com`,
        status: 'online',
      }));

      const writeStart = Date.now();
      await stateManager.write(dataset);
      const writeTime = Date.now() - writeStart;

      const readStart = Date.now();
      const data = await stateManager.read();
      const readTime = Date.now() - readStart;

      expect(data.length).toBe(1000);
      expect(writeTime).toBeLessThan(1000); // should write in <1s
      expect(readTime).toBeLessThan(100); // should read in <100ms
    });
  });
});
|
||||
@@ -1,138 +0,0 @@
|
||||
/**
 * Tailscale Route Tests
 *
 * Tests Tailscale status, configuration, and connection-checking endpoints.
 * The Tailscale routes are mounted at /api/tailscale/ on the API router:
 * - GET /api/tailscale/status — Tailscale status
 * - POST /api/tailscale/config — Update Tailscale configuration
 * - GET /api/tailscale/check-connection — Check if request comes from Tailscale IP
 * - GET /api/tailscale/devices — List Tailscale devices
 * - POST /api/tailscale/protect-service — Toggle Tailscale-only for a service
 * - POST /api/tailscale/oauth-config — OAuth credential setup (requires live API)
 * - GET /api/tailscale/api-devices — Enriched device list from API
 * - POST /api/tailscale/sync — Trigger API sync
 * - GET /api/tailscale/acl — Fetch ACL policy
 */

const request = require('supertest');
const fs = require('fs');
const path = require('path');
const os = require('os');

const testServicesFile = path.join(os.tmpdir(), `tailscale-services-${Date.now()}.json`);
const testConfigFile = path.join(os.tmpdir(), `tailscale-config-${Date.now()}.json`);
// FIX: also sandbox the Caddyfile. The sibling suites (api/sites) set
// CADDYFILE_PATH before requiring the server; without this, requiring
// ../server could read or write whatever Caddyfile path the server
// defaults to on the host machine.
const testCaddyfile = path.join(os.tmpdir(), `tailscale-Caddyfile-${Date.now()}`);

// Configure the environment BEFORE the server module is required.
process.env.SERVICES_FILE = testServicesFile;
process.env.CONFIG_FILE = testConfigFile;
process.env.CADDYFILE_PATH = testCaddyfile;
process.env.ENABLE_HEALTH_CHECKER = 'false'; // no background polling during tests
process.env.NODE_ENV = 'test';

// Seed the state files the server reads at require time.
// (The temp Caddyfile lives in os.tmpdir() and is harmless if left behind.)
fs.writeFileSync(testServicesFile, '[]', 'utf8');
fs.writeFileSync(testConfigFile, '{}', 'utf8');
fs.writeFileSync(testCaddyfile, '# Test Caddyfile', 'utf8');

const app = require('../server');
|
||||
|
||||
describe('Tailscale Routes', () => {
  afterAll(() => {
    // Best-effort cleanup of the per-run state files.
    [testServicesFile, testConfigFile].forEach((f) => {
      try { fs.unlinkSync(f); } catch (e) { /* ignore */ }
    });
  });

  describe('GET /api/tailscale/status', () => {
    test('should return 200 with status data', async () => {
      const res = await request(app).get('/api/tailscale/status');

      expect(res.statusCode).toBe(200);
      expect(res.body.success).toBe(true);

      if (!res.body.installed) {
        // Test machines usually lack Tailscale; expect the "not installed" shape.
        expect(res.body.installed).toBe(false);
        expect(res.body.connected).toBe(false);
        expect(res.body.message).toBeDefined();
      } else {
        // With a live daemon the payload carries the full status set.
        for (const key of ['connected', 'self', 'config', 'devices', 'deviceCount']) {
          expect(res.body).toHaveProperty(key);
        }
      }
    });
  });

  describe('GET /api/tailscale/check-connection', () => {
    test('should return 200 with connection info', async () => {
      const res = await request(app).get('/api/tailscale/check-connection');

      expect(res.statusCode).toBe(200);
      expect(res.body.success).toBe(true);
      expect(res.body).toHaveProperty('isTailscale');
      expect(typeof res.body.isTailscale).toBe('boolean');
      expect(res.body).toHaveProperty('clientIP');
    });

    test('should detect non-Tailscale IP for localhost requests', async () => {
      const res = await request(app).get('/api/tailscale/check-connection');

      expect(res.statusCode).toBe(200);
      // Supertest connects via loopback, not a 100.x.x.x address.
      expect(res.body.isTailscale).toBe(false);
    });
  });

  describe('GET /api/tailscale/devices', () => {
    test('should return 200 with devices array', async () => {
      const res = await request(app).get('/api/tailscale/devices');

      expect(res.statusCode).toBe(200);
      expect(res.body.success).toBe(true);
      expect(res.body).toHaveProperty('devices');
      expect(Array.isArray(res.body.devices)).toBe(true);
    });
  });

  describe('POST /api/tailscale/oauth-config', () => {
    const postOAuth = (payload) =>
      request(app).post('/api/tailscale/oauth-config').send(payload);

    test('should reject missing required fields', async () => {
      const res = await postOAuth({});

      expect(res.statusCode).toBe(400);
    });

    test('should reject partial credentials', async () => {
      // clientId without clientSecret must be refused.
      const res = await postOAuth({ clientId: 'test-id' });

      expect(res.statusCode).toBe(400);
    });
  });

  describe('GET /api/tailscale/api-devices', () => {
    test('should return 400 when OAuth is not configured', async () => {
      const res = await request(app).get('/api/tailscale/api-devices');

      expect(res.statusCode).toBe(400);
    });
  });

  describe('POST /api/tailscale/sync', () => {
    test('should return 400 when OAuth is not configured', async () => {
      const res = await request(app).post('/api/tailscale/sync');

      expect(res.statusCode).toBe(400);
    });
  });

  describe('POST /api/tailscale/protect-service', () => {
    test('should reject missing subdomain', async () => {
      const res = await request(app)
        .post('/api/tailscale/protect-service')
        .send({});

      expect(res.statusCode).toBe(400);
    });
  });
});
|
||||
@@ -1,192 +0,0 @@
|
||||
// update-manager.js creates a Docker instance at module level.
// On test machines without Docker this is fine — Docker methods are only
// called from async methods that these unit tests never invoke.

const updateManager = require('../update-manager');

beforeEach(() => {
  // Reset the singleton so every test starts from a blank slate.
  Object.assign(updateManager, {
    history: [],
    availableUpdates: new Map(),
    config: { autoUpdate: {} },
    checking: false,
  });
  if (updateManager.checkInterval) {
    clearInterval(updateManager.checkInterval);
    updateManager.checkInterval = null;
  }
});

afterAll(() => {
  // Ensure no background interval survives the suite.
  updateManager.stop();
});
|
||||
|
||||
describe('extractTag', () => {
  test('extracts tag from "nginx:latest"', () => {
    expect(updateManager.extractTag('nginx:latest')).toBe('latest');
  });

  test('returns "latest" when no tag specified', () => {
    // Docker's implicit default tag.
    expect(updateManager.extractTag('nginx')).toBe('latest');
  });

  test('extracts tag from registry/repo:tag format', () => {
    expect(updateManager.extractTag('docker.io/library/nginx:1.21')).toBe('1.21');
  });

  test('handles tags with dots and hyphens', () => {
    expect(updateManager.extractTag('myapp:v1.2.3-rc1')).toBe('v1.2.3-rc1');
  });
});

describe('parseAuthHeader', () => {
  test('returns null for null header', () => {
    expect(updateManager.parseAuthHeader(null)).toBeNull();
  });

  test('returns null for non-Bearer header', () => {
    expect(updateManager.parseAuthHeader('Basic realm="test"')).toBeNull();
  });

  test('parses Bearer realm URL', () => {
    const parsed = updateManager.parseAuthHeader(
      'Bearer realm="https://auth.docker.io/token"',
    );
    expect(parsed).toContain('https://auth.docker.io/token');
  });

  test('includes service parameter', () => {
    const parsed = updateManager.parseAuthHeader(
      'Bearer realm="https://auth.docker.io/token",service="registry.docker.io"',
    );
    expect(parsed).toContain('service=registry.docker.io');
  });

  test('includes scope parameter', () => {
    const parsed = updateManager.parseAuthHeader(
      'Bearer realm="https://auth.docker.io/token",service="registry.docker.io",scope="repository:library/nginx:pull"',
    );
    expect(parsed).toContain('scope=');
  });
});
|
||||
|
||||
describe('getAvailableUpdates', () => {
  test('returns empty array initially', () => {
    expect(updateManager.getAvailableUpdates()).toEqual([]);
  });

  test('returns array from availableUpdates map', () => {
    updateManager.availableUpdates.set('c1', { containerId: 'c1', imageName: 'nginx' });

    const updates = updateManager.getAvailableUpdates();
    expect(updates).toHaveLength(1);
    expect(updates[0].containerId).toBe('c1');
  });
});

describe('getHistory', () => {
  test('returns entries in reverse order', () => {
    // Most recent entry should come out first.
    updateManager.addToHistory({ containerId: 'c1', status: 'success' });
    updateManager.addToHistory({ containerId: 'c2', status: 'success' });

    expect(updateManager.getHistory()[0].containerId).toBe('c2');
  });

  test('returns empty array when no history', () => {
    expect(updateManager.getHistory()).toEqual([]);
  });

  test('respects limit parameter', () => {
    for (let i = 0; i < 10; i += 1) {
      updateManager.addToHistory({ containerId: `c${i}` });
    }

    expect(updateManager.getHistory(3)).toHaveLength(3);
  });
});
|
||||
|
||||
describe('addToHistory', () => {
  test('appends entry', () => {
    updateManager.addToHistory({ containerId: 'c1' });

    expect(updateManager.history).toHaveLength(1);
  });

  test('trims to 100 entries', () => {
    // Push past the cap and confirm the log stays bounded.
    for (let i = 0; i < 105; i += 1) {
      updateManager.addToHistory({ containerId: `c${i}` });
    }

    expect(updateManager.history.length).toBeLessThanOrEqual(100);
  });
});

describe('configureAutoUpdate', () => {
  test('creates autoUpdate config section', () => {
    updateManager.configureAutoUpdate('c1', { enabled: true });

    expect(updateManager.config.autoUpdate['c1']).toBeDefined();
  });

  test('stores container-specific config', () => {
    updateManager.configureAutoUpdate('c1', {
      enabled: true,
      schedule: 'daily',
      securityOnly: true,
    });

    const saved = updateManager.config.autoUpdate['c1'];
    expect(saved.schedule).toBe('daily');
    expect(saved.securityOnly).toBe(true);
  });

  test('defaults autoRollback to true', () => {
    updateManager.configureAutoUpdate('c1', { enabled: true });

    expect(updateManager.config.autoUpdate['c1'].autoRollback).toBe(true);
  });

  test('defaults schedule to weekly', () => {
    updateManager.configureAutoUpdate('c1', {});

    expect(updateManager.config.autoUpdate['c1'].schedule).toBe('weekly');
  });
});
|
||||
|
||||
describe('scheduleUpdate', () => {
  test('throws for past scheduled time', () => {
    const past = new Date(Date.now() - 60000).toISOString();

    expect(() => updateManager.scheduleUpdate('c1', past)).toThrow('Scheduled time must be in the future');
  });

  test('accepts future scheduled time', () => {
    // Fake timers keep the scheduled callback from ever firing.
    jest.useFakeTimers();
    const future = new Date(Date.now() + 60000).toISOString();

    expect(() => updateManager.scheduleUpdate('c1', future)).not.toThrow();
    jest.useRealTimers();
  });
});

describe('getChangelog', () => {
  test('returns placeholder response', async () => {
    const result = await updateManager.getChangelog('nginx:latest');

    expect(result.imageName).toBe('nginx:latest');
    expect(result.changelog).toBeDefined();
  });
});

describe('start / stop', () => {
  test('start sets checking flag', () => {
    jest.useFakeTimers();

    updateManager.start();
    expect(updateManager.checking).toBe(true);

    updateManager.stop();
    jest.useRealTimers();
  });

  test('stop clears interval', () => {
    jest.useFakeTimers();

    updateManager.start();
    updateManager.stop();

    expect(updateManager.checking).toBe(false);
    expect(updateManager.checkInterval).toBeNull();
    jest.useRealTimers();
  });

  test('start is idempotent', () => {
    jest.useFakeTimers();

    updateManager.start();
    const firstInterval = updateManager.checkInterval;
    updateManager.start(); // a second call must not replace the timer

    expect(updateManager.checkInterval).toBe(firstInterval);

    updateManager.stop();
    jest.useRealTimers();
  });
});
|
||||
Reference in New Issue
Block a user