Files
dashcaddy/dashcaddy-api/__tests__/backup-manager.test.js
Sami ea5acfa9a2 test: build comprehensive test suite reaching 80%+ coverage threshold
Add 22 test files (~700 tests) covering security-critical modules, core
infrastructure, API routes, and error handling. Final coverage: 86.73%
statements / 80.57% branches / 85.57% functions / 87.42% lines, all above
the 80% threshold enforced by jest.config.js.

Highlights:
- Unit tests for crypto-utils, credential-manager, auth-manager, csrf,
  input-validator, state-manager, health-checker, backup-manager,
  update-manager, resource-monitor, app-templates, platform-paths,
  port-lock-manager, errors, error-handler, pagination, url-resolver
- Route tests for health, services, and containers (supertest + mocked deps)
- Shared test-utils helper for mock factories and Express app builder
- npm scripts for CI: test:ci, test:unit, test:routes, test:security,
  test:changed, test:debug
- jest.config.js: expand coverage targets, add 80% threshold gate
- routes/services.js: import ValidationError and NotFoundError from errors
- .gitignore: exclude coverage/, *.bak, *.log

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-06 21:36:46 -07:00

785 lines
28 KiB
JavaScript

// Backup Manager Tests
// Validates backup/restore lifecycle for DashCaddy configurations
// NOTE: Jest hoists jest.mock(...) calls above the require()s below, so the
// mocked modules are already in place when anything loads them.
jest.mock('fs');
jest.mock('child_process');
// Stub the credential manager: deterministic export payload for backups,
// spyable importBackup for restore assertions.
jest.mock('../credential-manager', () => ({
exportBackup: jest.fn().mockReturnValue({ encrypted: 'cred-data' }),
importBackup: jest.fn()
}));
// Same pattern for resource stats: canned export data, spyable import.
jest.mock('../resource-monitor', () => ({
exportStats: jest.fn().mockReturnValue({ stats: [{ cpu: 10 }] }),
importStats: jest.fn()
}));
const fs = require('fs');
const crypto = require('crypto');
const credentialManager = require('../credential-manager');
const resourceMonitor = require('../resource-monitor');
// Setup defaults BEFORE requiring singleton (constructor calls loadConfig/loadHistory)
// existsSync=false makes the singleton boot from an empty config/history.
fs.existsSync.mockReturnValue(false);
fs.readFileSync.mockReturnValue('{}');
fs.writeFileSync.mockReturnValue(undefined);
fs.mkdirSync.mockReturnValue(undefined);
fs.unlinkSync.mockReturnValue(undefined);
const backupManager = require('../backup-manager');
beforeEach(() => {
  // Fresh mock call history and fake timers for every test.
  jest.clearAllMocks();
  jest.useFakeTimers();
  // Re-apply fs defaults: clearAllMocks wipes call data only, and earlier
  // tests may have swapped in throwing implementations.
  fs.existsSync.mockReturnValue(false);
  fs.readFileSync.mockReturnValue('{}');
  fs.writeFileSync.mockReturnValue(undefined);
  fs.mkdirSync.mockReturnValue(undefined);
  fs.unlinkSync.mockReturnValue(undefined);
  // Reset the singleton's internal state back to its pristine shape.
  backupManager.history = [];
  backupManager.config = { backups: {}, defaultRetention: { keep: 7 } };
  backupManager.running = false;
  // Drop leftover timers directly — stop() is a no-op when running=false.
  backupManager.scheduledJobs.forEach((job) => clearInterval(job));
  backupManager.scheduledJobs.clear();
});
// Teardown: stop the scheduler (no-op when idle), then restore real timers.
afterEach(() => {
backupManager.stop();
jest.useRealTimers();
});
describe('BackupManager — backup/restore lifecycle', () => {
describe('constructor and config', () => {
  it('starts with empty config when no config file exists', () => {
    const cfg = backupManager.getConfig();
    expect(cfg.backups).toEqual({});
    expect(cfg.defaultRetention).toEqual({ keep: 7 });
  });

  it('loadConfig returns saved config when file exists', () => {
    const stored = {
      backups: { daily: { enabled: true, schedule: 'daily' } },
      defaultRetention: { keep: 14 }
    };
    fs.existsSync.mockReturnValue(true);
    fs.readFileSync.mockReturnValue(JSON.stringify(stored));
    const loaded = backupManager.loadConfig();
    expect(loaded.backups.daily).toBeDefined();
    expect(loaded.defaultRetention.keep).toBe(14);
  });

  it('loadConfig returns defaults on error', () => {
    fs.existsSync.mockReturnValue(true);
    fs.readFileSync.mockImplementation(() => {
      throw new Error('read error');
    });
    expect(backupManager.loadConfig().backups).toEqual({});
  });

  it('loadHistory returns empty array when no file', () => {
    fs.existsSync.mockReturnValue(false);
    expect(backupManager.loadHistory()).toEqual([]);
  });

  it('loadHistory loads saved entries', () => {
    const entries = [{ id: 'test-1', status: 'success' }];
    fs.existsSync.mockReturnValue(true);
    fs.readFileSync.mockReturnValue(JSON.stringify(entries));
    expect(backupManager.loadHistory()).toEqual(entries);
  });
});
describe('start/stop scheduler', () => {
  it('does nothing on double start', () => {
    backupManager.start();
    backupManager.start(); // second call must be a harmless no-op
    expect(backupManager.running).toBe(true);
  });

  it('does nothing on stop when not running', () => {
    backupManager.stop(); // must not throw while idle
    expect(backupManager.running).toBe(false);
  });

  it('clears scheduled jobs on stop', () => {
    const handle = setInterval(() => {}, 10000);
    backupManager.scheduledJobs.set('test', handle);
    backupManager.running = true;
    backupManager.stop();
    expect(backupManager.scheduledJobs.size).toBe(0);
    expect(backupManager.running).toBe(false);
  });
});
describe('scheduleBackup intervals', () => {
  // All four named schedules must resolve to a valid timer interval.
  for (const schedule of ['hourly', 'daily', 'weekly', 'monthly']) {
    it(`schedules ${schedule} backup`, () => {
      backupManager.scheduleBackup('test', { schedule });
      expect(backupManager.scheduledJobs.has('test')).toBe(true);
    });
  }

  it('accepts custom interval in minutes', () => {
    backupManager.scheduleBackup('test', { schedule: '30' });
    expect(backupManager.scheduledJobs.has('test')).toBe(true);
  });

  it('rejects invalid schedule', () => {
    backupManager.scheduleBackup('test', { schedule: 'bogus' });
    expect(backupManager.scheduledJobs.has('test')).toBe(false);
  });
});
describe('compress/decompress', () => {
  it('round-trips data through gzip', async () => {
    const payload = { version: '1.0', data: { services: [{ id: 'plex' }] } };
    const packed = await backupManager.compressBackup(payload);
    expect(Buffer.isBuffer(packed)).toBe(true);
    const unpacked = await backupManager.decompressBackup(packed);
    expect(unpacked).toEqual(payload);
  });

  it('compressed output is smaller than JSON', async () => {
    // Highly repetitive payload, so gzip must beat the raw JSON size.
    const payload = { bigArray: Array(100).fill({ id: 'test', name: 'test-service' }) };
    const packed = await backupManager.compressBackup(payload);
    expect(packed.length).toBeLessThan(JSON.stringify(payload).length);
  });
});
describe('encrypt/decrypt (AES-256-GCM)', () => {
  // 32-byte (256-bit) key, hex-encoded, as encryptBackup expects.
  const testKey = crypto.randomBytes(32).toString('hex');

  it('round-trips data through encryption', async () => {
    const original = Buffer.from('DashCaddy backup data');
    const encrypted = await backupManager.encryptBackup(original, testKey);
    const decrypted = await backupManager.decryptBackup(encrypted, testKey);
    expect(decrypted.toString()).toBe('DashCaddy backup data');
  });

  it('encrypted format is iv:authTag:ciphertext (base64)', async () => {
    const encrypted = await backupManager.encryptBackup(Buffer.from('test'), testKey);
    const parts = encrypted.toString().split(':');
    expect(parts.length).toBeGreaterThanOrEqual(3);
  });

  it('rejects tampered data (auth tag mismatch)', async () => {
    const encrypted = await backupManager.encryptBackup(Buffer.from('test'), testKey);
    const str = encrypted.toString();
    // BUGFIX: the IV is random base64, so unconditionally writing 'X' could
    // occasionally reproduce the original first character, leaving the data
    // untampered and making this test flaky. Flip to a character guaranteed
    // to differ from whatever is there.
    const tampered = Buffer.from((str[0] === 'X' ? 'Y' : 'X') + str.substring(1));
    await expect(backupManager.decryptBackup(tampered, testKey))
      .rejects.toThrow();
  });

  it('rejects wrong key', async () => {
    const encrypted = await backupManager.encryptBackup(Buffer.from('test'), testKey);
    const wrongKey = crypto.randomBytes(32).toString('hex');
    await expect(backupManager.decryptBackup(encrypted, wrongKey))
      .rejects.toThrow();
  });

  it('rejects invalid format (fewer than 3 parts)', async () => {
    await expect(backupManager.decryptBackup(Buffer.from('onlyonepart'), testKey))
      .rejects.toThrow('Invalid encrypted backup format');
  });
});
describe('calculateChecksum', () => {
  it('returns SHA-256 hex digest', () => {
    const digest = backupManager.calculateChecksum(Buffer.from('test data'));
    expect(digest).toMatch(/^[a-f0-9]{64}$/);
  });

  it('same data produces same checksum', () => {
    const buf = Buffer.from('DashCaddy');
    const first = backupManager.calculateChecksum(buf);
    const second = backupManager.calculateChecksum(buf);
    expect(first).toBe(second);
  });

  it('different data produces different checksum', () => {
    const a = backupManager.calculateChecksum(Buffer.from('A'));
    const b = backupManager.calculateChecksum(Buffer.from('B'));
    expect(a).not.toBe(b);
  });
});
describe('saveToLocal', () => {
  it('creates backup directory if missing', async () => {
    fs.existsSync.mockReturnValue(false);
    const destination = { path: '/custom/backups' };
    await backupManager.saveToLocal(Buffer.from('data'), destination, 'test-123');
    expect(fs.mkdirSync).toHaveBeenCalledWith('/custom/backups', { recursive: true });
  });

  it('writes backup file with correct name', async () => {
    fs.existsSync.mockReturnValue(true);
    const result = await backupManager.saveToLocal(Buffer.from('data'), {}, 'daily-1234');
    expect(fs.writeFileSync).toHaveBeenCalledWith(
      expect.stringContaining('daily-1234.backup'),
      expect.any(Buffer)
    );
    expect(result.type).toBe('local');
    expect(result.size).toBe(4); // 'data' is 4 bytes
  });
});
describe('verifyBackup', () => {
  it('passes when checksum matches', async () => {
    const payload = Buffer.from('verified');
    const expected = crypto.createHash('sha256').update(payload).digest('hex');
    fs.readFileSync.mockReturnValue(payload);
    const ok = await backupManager.verifyBackup({ type: 'local', path: '/backup.dat' }, expected);
    expect(ok).toBe(true);
  });

  it('throws on checksum mismatch', async () => {
    fs.readFileSync.mockReturnValue(Buffer.from('tampered'));
    const location = { type: 'local', path: '/backup.dat' };
    await expect(backupManager.verifyBackup(location, 'wrong-checksum'))
      .rejects.toThrow('checksum mismatch');
  });
});
describe('history management', () => {
  it('addToHistory appends and saves', () => {
    backupManager.addToHistory({ id: 'test-1', status: 'success' });
    expect(backupManager.getHistory()).toHaveLength(1);
    expect(fs.writeFileSync).toHaveBeenCalled();
  });

  it('caps history at 100 entries', () => {
    Array.from({ length: 110 }, (_, i) => i).forEach((i) => {
      backupManager.addToHistory({ id: `test-${i}`, status: 'success' });
    });
    expect(backupManager.history.length).toBe(100);
  });

  it('getHistory returns newest first', () => {
    backupManager.addToHistory({ id: 'old', status: 'success' });
    backupManager.addToHistory({ id: 'new', status: 'success' });
    const [first, second] = backupManager.getHistory();
    expect(first.id).toBe('new');
    expect(second.id).toBe('old');
  });

  it('getHistory respects limit', () => {
    Array.from({ length: 10 }, (_, i) => i).forEach((i) => {
      backupManager.addToHistory({ id: `test-${i}`, status: 'success' });
    });
    expect(backupManager.getHistory(3)).toHaveLength(3);
  });
});
describe('updateConfig', () => {
  it('merges new config and saves', () => {
    backupManager.updateConfig({ customSetting: true });
    const merged = backupManager.getConfig();
    expect(merged.customSetting).toBe(true);
    expect(fs.writeFileSync).toHaveBeenCalled();
  });

  it('restarts scheduler on config update', () => {
    backupManager.start();
    expect(backupManager.running).toBe(true);
    backupManager.updateConfig({ backups: {} });
    // The scheduler should come back up after the restart.
    expect(backupManager.running).toBe(true);
  });
});
describe('backupServices / backupConfig', () => {
  it('reads services.json when it exists', () => {
    fs.existsSync.mockReturnValue(true);
    fs.readFileSync.mockReturnValue(JSON.stringify([{ id: 'plex' }]));
    expect(backupManager.backupServices()).toEqual([{ id: 'plex' }]);
  });

  it('returns null when services.json missing', () => {
    fs.existsSync.mockReturnValue(false);
    expect(backupManager.backupServices()).toBeNull();
  });

  it('returns null on read error', () => {
    fs.existsSync.mockReturnValue(true);
    fs.readFileSync.mockImplementation(() => {
      throw new Error('read error');
    });
    expect(backupManager.backupServices()).toBeNull();
  });

  it('reads config.json when it exists', () => {
    fs.existsSync.mockReturnValue(true);
    fs.readFileSync.mockReturnValue(JSON.stringify({ tld: '.sami' }));
    expect(backupManager.backupConfig()).toEqual({ tld: '.sami' });
  });
});
describe('cleanupOldBackups', () => {
  // Seed one successful 'daily' history entry dated Jan (i+1), 2026.
  const seedDaily = (i) => {
    backupManager.history.push({
      id: `daily-${i}`,
      name: 'daily',
      status: 'success',
      timestamp: new Date(2026, 0, i + 1).toISOString(),
      locations: [{ type: 'local', path: `/backups/daily-${i}.backup` }]
    });
  };

  it('deletes backups beyond retention limit', async () => {
    [0, 1, 2, 3, 4].forEach(seedDaily);
    fs.existsSync.mockReturnValue(true);
    await backupManager.cleanupOldBackups('daily', { keep: 2 });
    // The 3 oldest files go, and only 2 'daily' entries survive.
    expect(fs.unlinkSync).toHaveBeenCalledTimes(3);
    const remaining = backupManager.history.filter((b) => b.name === 'daily');
    expect(remaining).toHaveLength(2);
  });

  it('keeps all when under retention limit', async () => {
    backupManager.history.push({
      id: 'daily-1', name: 'daily', status: 'success',
      timestamp: new Date().toISOString(),
      locations: [{ type: 'local', path: '/backups/daily-1.backup' }]
    });
    await backupManager.cleanupOldBackups('daily', { keep: 7 });
    expect(fs.unlinkSync).not.toHaveBeenCalled();
  });
});
describe('backupCredentials / backupStats', () => {
  it('returns credential export data', () => {
    expect(backupManager.backupCredentials()).toEqual({ encrypted: 'cred-data' });
    expect(credentialManager.exportBackup).toHaveBeenCalled();
  });

  it('returns null on credential export error', () => {
    credentialManager.exportBackup.mockImplementationOnce(() => {
      throw new Error('no key');
    });
    expect(backupManager.backupCredentials()).toBeNull();
  });

  it('returns stats export data', () => {
    expect(backupManager.backupStats()).toEqual({ stats: [{ cpu: 10 }] });
    expect(resourceMonitor.exportStats).toHaveBeenCalled();
  });

  it('returns null on stats export error', () => {
    resourceMonitor.exportStats.mockImplementationOnce(() => {
      throw new Error('no stats');
    });
    expect(backupManager.backupStats()).toBeNull();
  });
});
describe('createBackupData', () => {
  // Route reads of services.json / config.json to canned fixtures.
  const mockSourceFiles = () => {
    fs.existsSync.mockReturnValue(true);
    fs.readFileSync.mockImplementation((filePath) => {
      if (typeof filePath === 'string') {
        if (filePath.includes('services')) return JSON.stringify([{ id: 'plex' }]);
        if (filePath.includes('config')) return JSON.stringify({ tld: '.sami' });
      }
      return '{}';
    });
  };

  it('includes all sources when "all" specified', async () => {
    mockSourceFiles();
    const data = await backupManager.createBackupData(['all']);
    expect(data.version).toBe('1.0');
    expect(data.data.services).toEqual([{ id: 'plex' }]);
    expect(data.data.config).toEqual({ tld: '.sami' });
    expect(data.data.credentials).toEqual({ encrypted: 'cred-data' });
    expect(data.data.stats).toEqual({ stats: [{ cpu: 10 }] });
  });

  it('includes only credentials when specified', async () => {
    const data = await backupManager.createBackupData(['credentials']);
    expect(data.data.credentials).toEqual({ encrypted: 'cred-data' });
    expect(data.data.services).toBeUndefined();
  });

  it('includes only stats when specified', async () => {
    const data = await backupManager.createBackupData(['stats']);
    expect(data.data.stats).toEqual({ stats: [{ cpu: 10 }] });
    expect(data.data.services).toBeUndefined();
  });
});
describe('saveToDestination', () => {
  it('routes to saveToLocal for local type', async () => {
    fs.existsSync.mockReturnValue(true);
    const saved = await backupManager.saveToDestination(Buffer.from('data'), { type: 'local' }, 'bk-1');
    expect(saved.type).toBe('local');
    expect(fs.writeFileSync).toHaveBeenCalled();
  });

  it('throws for unsupported destination type', async () => {
    const attempt = backupManager.saveToDestination(Buffer.from('data'), { type: 's3' }, 'bk-1');
    await expect(attempt).rejects.toThrow('Unsupported destination type: s3');
  });
});
// End-to-end pipeline tests: collect → compress → (encrypt) → save → history.
// Statement order matters here (listeners registered before the call,
// path-conditional mocks installed before the pipeline reads files).
describe('executeBackup', () => {
it('runs full backup pipeline and records success in history', async () => {
fs.existsSync.mockReturnValue(true);
// Serve distinct fixtures per source file; anything else reads as '{}'.
fs.readFileSync.mockImplementation((filePath) => {
if (typeof filePath === 'string') {
if (filePath.includes('services')) return JSON.stringify([{ id: 'plex' }]);
if (filePath.includes('config')) return JSON.stringify({ tld: '.sami' });
}
return '{}';
});
// Capture lifecycle events in arrival order.
const events = [];
backupManager.on('backup-start', e => events.push({ type: 'start', ...e }));
backupManager.on('backup-complete', e => events.push({ type: 'complete', ...e }));
const result = await backupManager.executeBackup('daily', {
include: ['services', 'config'],
destinations: [{ type: 'local' }],
verify: false
});
expect(result.status).toBe('success');
expect(result.name).toBe('daily');
expect(result.compressed).toBe(true);
expect(result.size).toBeGreaterThan(0);
expect(backupManager.history).toHaveLength(1);
expect(events).toHaveLength(2);
expect(events[0].type).toBe('start');
expect(events[1].type).toBe('complete');
backupManager.removeAllListeners();
});
it('runs encrypted backup pipeline', async () => {
const key = crypto.randomBytes(32).toString('hex');
fs.existsSync.mockReturnValue(true);
fs.readFileSync.mockReturnValue(JSON.stringify([{ id: 'plex' }]));
const result = await backupManager.executeBackup('encrypted', {
include: ['services'],
destinations: [{ type: 'local' }],
encrypt: true,
encryptionKey: key,
verify: false
});
expect(result.status).toBe('success');
expect(result.encrypted).toBe(true);
});
it('records failure in history when all destinations fail', async () => {
fs.existsSync.mockReturnValue(true);
fs.readFileSync.mockReturnValue(JSON.stringify([{ id: 'plex' }]));
// Only the backup artifact write fails; history/config writes still succeed.
fs.writeFileSync.mockImplementation((path) => {
if (typeof path === 'string' && path.includes('.backup')) throw new Error('disk full');
});
const events = [];
backupManager.on('backup-failed', e => events.push(e));
await expect(backupManager.executeBackup('daily', {
include: ['services'],
destinations: [{ type: 'local' }],
verify: false
})).rejects.toThrow('Failed to save backup to any destination');
// The failure is still recorded as a history entry.
expect(backupManager.history).toHaveLength(1);
expect(backupManager.history[0].status).toBe('failed');
expect(events).toHaveLength(1);
backupManager.removeAllListeners();
});
it('runs cleanup after successful backup with retention', async () => {
fs.existsSync.mockReturnValue(true);
fs.readFileSync.mockReturnValue(JSON.stringify([{ id: 'plex' }]));
// Pre-fill history with old backups
for (let i = 0; i < 5; i++) {
backupManager.history.push({
id: `daily-old-${i}`, name: 'daily', status: 'success',
timestamp: new Date(2026, 0, i + 1).toISOString(),
locations: [{ type: 'local', path: `/backups/daily-old-${i}.backup` }]
});
}
await backupManager.executeBackup('daily', {
include: ['services'],
destinations: [{ type: 'local' }],
verify: false,
retention: { keep: 2 }
});
// Old backups should be cleaned up (5 old + 1 new = 6 total, keep 2 → delete 4)
expect(fs.unlinkSync).toHaveBeenCalled();
});
});
// Restore tests: each builds a real compressed (optionally encrypted)
// fixture via the manager's own compress/encrypt helpers, seeds a history
// entry pointing at it, then routes fs.readFileSync to return the fixture.
describe('restoreBackup', () => {
it('throws when backup not found in history', async () => {
await expect(backupManager.restoreBackup('nonexistent'))
.rejects.toThrow('Backup not found: nonexistent');
});
it('throws on unsupported backup version', async () => {
// Create backup data with wrong version
const wrongVersionData = { version: '2.0', data: {} };
const compressed = await backupManager.compressBackup(wrongVersionData);
backupManager.history.push({
id: 'test-restore',
status: 'success',
encrypted: false,
locations: [{ type: 'local', path: '/backups/test-restore.backup' }]
});
fs.readFileSync.mockReturnValue(compressed);
await expect(backupManager.restoreBackup('test-restore'))
.rejects.toThrow('Unsupported backup version: 2.0');
});
it('restores services and config from backup', async () => {
const backupData = {
version: '1.0',
data: {
services: [{ id: 'plex' }, { id: 'radarr' }],
config: { tld: '.sami' }
}
};
const compressed = await backupManager.compressBackup(backupData);
backupManager.history.push({
id: 'test-restore',
status: 'success',
encrypted: false,
locations: [{ type: 'local', path: '/backups/test-restore.backup' }]
});
fs.readFileSync.mockReturnValue(compressed);
// Capture restore lifecycle events for ordering assertions.
const events = [];
backupManager.on('restore-start', e => events.push({ type: 'start', ...e }));
backupManager.on('restore-complete', e => events.push({ type: 'complete', ...e }));
const result = await backupManager.restoreBackup('test-restore');
expect(result.success).toBe(true);
expect(result.restored.services).toBe(true);
expect(result.restored.config).toBe(true);
// Restored services must be written back to the services file.
expect(fs.writeFileSync).toHaveBeenCalledWith(
expect.stringContaining('services'),
expect.stringContaining('plex')
);
expect(events).toHaveLength(2);
backupManager.removeAllListeners();
});
it('restores credentials and stats from backup', async () => {
const backupData = {
version: '1.0',
data: {
credentials: { encrypted: 'cred-data' },
stats: { stats: [{ cpu: 10 }] }
}
};
const compressed = await backupManager.compressBackup(backupData);
backupManager.history.push({
id: 'full-restore',
status: 'success',
encrypted: false,
locations: [{ type: 'local', path: '/backups/full-restore.backup' }]
});
fs.readFileSync.mockReturnValue(compressed);
const result = await backupManager.restoreBackup('full-restore');
expect(result.restored.credentials).toBe(true);
expect(result.restored.stats).toBe(true);
// Delegation to the mocked managers carries the exact payloads through.
expect(credentialManager.importBackup).toHaveBeenCalledWith({ encrypted: 'cred-data' });
expect(resourceMonitor.importStats).toHaveBeenCalledWith({ stats: [{ cpu: 10 }] });
});
it('restores encrypted backup', async () => {
const key = crypto.randomBytes(32).toString('hex');
const backupData = { version: '1.0', data: { services: [{ id: 'plex' }] } };
const compressed = await backupManager.compressBackup(backupData);
const encrypted = await backupManager.encryptBackup(compressed, key);
backupManager.history.push({
id: 'enc-restore',
status: 'success',
encrypted: true,
locations: [{ type: 'local', path: '/backups/enc-restore.backup' }]
});
fs.readFileSync.mockReturnValue(encrypted);
const result = await backupManager.restoreBackup('enc-restore', { encryptionKey: key });
expect(result.success).toBe(true);
expect(result.restored.services).toBe(true);
});
it('emits restore-failed on error', async () => {
backupManager.history.push({
id: 'fail-restore',
status: 'success',
encrypted: false,
locations: [{ type: 'local', path: '/backups/fail-restore.backup' }]
});
// Reading the backup artifact itself blows up.
fs.readFileSync.mockImplementation(() => { throw new Error('read error'); });
const events = [];
backupManager.on('restore-failed', e => events.push(e));
await expect(backupManager.restoreBackup('fail-restore'))
.rejects.toThrow();
expect(events).toHaveLength(1);
expect(events[0].error).toBeDefined();
backupManager.removeAllListeners();
});
it('skips restore of specific sections when options disable them', async () => {
const backupData = {
version: '1.0',
data: {
services: [{ id: 'plex' }],
config: { tld: '.sami' },
credentials: { encrypted: 'data' },
stats: { stats: [] }
}
};
const compressed = await backupManager.compressBackup(backupData);
backupManager.history.push({
id: 'partial-restore',
status: 'success',
encrypted: false,
locations: [{ type: 'local', path: '/backups/partial.backup' }]
});
fs.readFileSync.mockReturnValue(compressed);
// All four section toggles off: nothing should be marked restored.
const result = await backupManager.restoreBackup('partial-restore', {
restoreServices: false,
restoreConfig: false,
restoreCredentials: false,
restoreStats: false
});
expect(result.success).toBe(true);
expect(result.restored.services).toBeUndefined();
expect(result.restored.config).toBeUndefined();
expect(result.restored.credentials).toBeUndefined();
expect(result.restored.stats).toBeUndefined();
});
});
describe('start with configured backups', () => {
  it('schedules enabled backups on start', () => {
    backupManager.config = {
      backups: {
        daily: { enabled: true, schedule: 'daily' },
        disabled: { enabled: false, schedule: 'hourly' }
      },
      defaultRetention: { keep: 7 }
    };
    backupManager.start();
    // Only the enabled entry gets a timer.
    expect(backupManager.scheduledJobs.has('daily')).toBe(true);
    expect(backupManager.scheduledJobs.has('disabled')).toBe(false);
  });
});
describe('persistence error handling', () => {
  it('saveConfig handles write error gracefully', () => {
    fs.writeFileSync.mockImplementation(() => {
      throw new Error('disk full');
    });
    expect(() => backupManager.saveConfig()).not.toThrow();
  });

  it('saveHistory handles write error gracefully', () => {
    fs.writeFileSync.mockImplementation(() => {
      throw new Error('disk full');
    });
    expect(() => backupManager.saveHistory()).not.toThrow();
  });

  it('backupConfig returns null on read error', () => {
    fs.existsSync.mockReturnValue(true);
    fs.readFileSync.mockImplementation(() => {
      throw new Error('corrupt');
    });
    expect(backupManager.backupConfig()).toBeNull();
  });
});
describe('verifyBackup edge cases', () => {
  it('returns true for non-local backup type', async () => {
    // Non-local destinations are not checksummed locally.
    const location = { type: 'remote', path: 'na' };
    await expect(backupManager.verifyBackup(location, 'checksum')).resolves.toBe(true);
  });
});
describe('DashCaddy scenarios', () => {
  it('full backup pipeline: services + config → compress → verify', async () => {
    fs.existsSync.mockReturnValue(true);
    fs.readFileSync.mockImplementation((filePath) => {
      if (typeof filePath === 'string') {
        if (filePath.includes('services')) return JSON.stringify([{ id: 'plex' }, { id: 'radarr' }]);
        if (filePath.includes('config')) return JSON.stringify({ tld: '.sami', mode: 'homelab' });
      }
      return '{}';
    });
    const data = await backupManager.createBackupData(['services', 'config']);
    expect(data.version).toBe('1.0');
    expect(data.data.services).toEqual([{ id: 'plex' }, { id: 'radarr' }]);
    expect(data.data.config).toEqual({ tld: '.sami', mode: 'homelab' });
    // Compression must round-trip the exact same payload.
    const packed = await backupManager.compressBackup(data);
    const unpacked = await backupManager.decompressBackup(packed);
    expect(unpacked.data.services).toEqual(data.data.services);
  });

  it('encrypted backup round-trip with real AES-256-GCM', async () => {
    const key = crypto.randomBytes(32).toString('hex');
    const payload = { version: '1.0', data: { services: [{ id: 'jellyfin' }] } };
    const packed = await backupManager.compressBackup(payload);
    const sealed = await backupManager.encryptBackup(packed, key);
    const opened = await backupManager.decryptBackup(sealed, key);
    const restored = await backupManager.decompressBackup(opened);
    expect(restored.data.services[0].id).toBe('jellyfin');
  });
});
});