Files
dashcaddy/dashcaddy-api/backup-manager.js
Sami f537a0dd25 feat: cloud backup destinations + long-term resource history
Cloud backups (Dropbox / WebDAV / SFTP):
- backup-manager.js: save + load handlers per provider, credential
  resolution via credentialManager, destination probe.
- routes/backups.js: /credentials/{provider} (masked GET, POST, DELETE),
  /test-destination, scheduling endpoints.
- status/js/backup-restore.js: destination picker, provider-specific
  credential forms, test button wired to backend probe.
- npm deps already present (dropbox 10.34.0, webdav 5.7.1,
  ssh2-sftp-client 11.0.0).

Resource history:
- resource-monitor.js: three-tier rollup storage — raw 10s samples
  (7-day retention), hourly rollups (30-day), daily rollups
  (365-day). getHistoryByRange() auto-selects the appropriate tier.
- routes/monitoring.js: /monitoring/history/:containerId now supports
  startTime/endTime range mode (legacy ?hours=N still works).
- status/js/resource-monitor.js + dashboard.css: "History" tab with
  range buttons (1h/24h/7d/30d/1y), SVG sparklines for
  CPU / memory / network. Renderer handles raw and rolled-up shapes.

status/dist/features.js rebuilt from source via build.js.

Lifted out of wip/cloud-backups-and-history; the half-finished
app-deps feature from that branch (frontend calls /api/v1/apps/
check-dependencies but the endpoint doesn't exist) is preserved
separately on wip/app-deps for later.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-06 19:14:38 -07:00

1130 lines
32 KiB
JavaScript

/**
* Automated Backup & Restore Manager
* Handles scheduled backups with local and cloud destinations
* (Dropbox / WebDAV / SFTP), plus restore, verification, and retention
*/
const fs = require('fs');
const path = require('path');
const { execSync } = require('child_process');
const crypto = require('crypto');
const EventEmitter = require('events');
const BACKUP_CONFIG_FILE = process.env.BACKUP_CONFIG_FILE || path.join(__dirname, 'backup-config.json');
const BACKUP_HISTORY_FILE = process.env.BACKUP_HISTORY_FILE || path.join(__dirname, 'backup-history.json');
const DEFAULT_BACKUP_DIR = process.env.BACKUP_DIR || path.join(__dirname, 'backups');
class BackupManager extends EventEmitter {
constructor() {
super();
this.config = this.loadConfig();
this.history = this.loadHistory();
this.scheduledJobs = new Map();
this.running = false;
}
/**
* Start backup scheduler
*/
start() {
if (this.running) return;
console.log('[BackupManager] Starting backup scheduler');
this.running = true;
// Schedule all configured backups
for (const [name, backup] of Object.entries(this.config.backups || {})) {
if (backup.enabled && backup.schedule) {
this.scheduleBackup(name, backup);
}
}
}
/**
* Stop backup scheduler
*/
stop() {
if (!this.running) return;
console.log('[BackupManager] Stopping backup scheduler');
this.running = false;
// Clear all scheduled jobs
for (const [name, job] of this.scheduledJobs.entries()) {
clearInterval(job);
}
this.scheduledJobs.clear();
}
/**
* Schedule a backup job
*/
scheduleBackup(name, backup) {
// Parse schedule (cron-like: daily, weekly, monthly, or interval in minutes)
let intervalMs;
switch (backup.schedule) {
case 'hourly':
intervalMs = 60 * 60 * 1000;
break;
case 'daily':
intervalMs = 24 * 60 * 60 * 1000;
break;
case 'weekly':
intervalMs = 7 * 24 * 60 * 60 * 1000;
break;
case 'monthly':
intervalMs = 30 * 24 * 60 * 60 * 1000;
break;
default: {
// Custom interval in minutes
const minutes = parseInt(backup.schedule, 10);
if (!isNaN(minutes) && minutes > 0) {
intervalMs = minutes * 60 * 1000;
} else {
console.error(`[BackupManager] Invalid schedule for ${name}: ${backup.schedule}`);
return;
}
}
}
// Schedule the job
const job = setInterval(() => {
this.executeBackup(name, backup).catch(error => {
console.error(`[BackupManager] Scheduled backup ${name} failed:`, error.message);
});
}, intervalMs);
this.scheduledJobs.set(name, job);
console.log(`[BackupManager] Scheduled backup '${name}' every ${backup.schedule}`);
// Run immediately if configured
if (backup.runImmediately) {
this.executeBackup(name, backup).catch(error => {
console.error(`[BackupManager] Initial backup ${name} failed:`, error.message);
});
}
}
/**
* Execute a backup
*/
async executeBackup(name, backup) {
const startTime = Date.now();
const backupId = `${name}-${Date.now()}`;
console.log(`[BackupManager] Starting backup: ${name}`);
this.emit('backup-start', { name, backupId, timestamp: new Date().toISOString() });
try {
// Create backup data
const backupData = await this.createBackupData(backup.include || ['all']);
// Compress backup
const compressed = await this.compressBackup(backupData);
// Encrypt if configured
let finalData = compressed;
if (backup.encrypt && backup.encryptionKey) {
finalData = await this.encryptBackup(compressed, backup.encryptionKey);
}
// Calculate checksum
const checksum = this.calculateChecksum(finalData);
// Save to destinations
const destinations = backup.destinations || [{ type: 'local' }];
const savedLocations = [];
for (const dest of destinations) {
try {
const location = await this.saveToDestination(finalData, dest, backupId);
savedLocations.push(location);
} catch (error) {
console.error(`[BackupManager] Failed to save to ${dest.type}:`, error.message);
}
}
if (savedLocations.length === 0) {
throw new Error('Failed to save backup to any destination');
}
// Verify backup
if (backup.verify !== false) {
await this.verifyBackup(savedLocations[0], checksum);
}
// Record in history
const duration = Date.now() - startTime;
const historyEntry = {
id: backupId,
name,
timestamp: new Date().toISOString(),
duration,
size: finalData.length,
checksum,
locations: savedLocations,
encrypted: !!backup.encrypt,
compressed: true,
status: 'success'
};
this.addToHistory(historyEntry);
// Cleanup old backups
if (backup.retention) {
await this.cleanupOldBackups(name, backup.retention);
}
this.emit('backup-complete', historyEntry);
console.log(`[BackupManager] Backup ${name} completed in ${duration}ms`);
return historyEntry;
} catch (error) {
const duration = Date.now() - startTime;
const historyEntry = {
id: backupId,
name,
timestamp: new Date().toISOString(),
duration,
status: 'failed',
error: error.message
};
this.addToHistory(historyEntry);
this.emit('backup-failed', historyEntry);
throw error;
}
}
/**
* Create backup data from specified sources
*/
async createBackupData(include) {
const data = {
version: '1.0',
timestamp: new Date().toISOString(),
hostname: require('os').hostname(),
data: {}
};
for (const source of include) {
switch (source) {
case 'all':
data.data.services = this.backupServices();
data.data.config = this.backupConfig();
data.data.credentials = this.backupCredentials();
data.data.stats = this.backupStats();
break;
case 'services':
data.data.services = this.backupServices();
break;
case 'config':
data.data.config = this.backupConfig();
break;
case 'credentials':
data.data.credentials = this.backupCredentials();
break;
case 'stats':
data.data.stats = this.backupStats();
break;
case 'volumes':
data.data.volumes = await this.backupVolumes();
break;
}
}
return data;
}
/**
* Backup services configuration
*/
backupServices() {
try {
const servicesFile = process.env.SERVICES_FILE || path.join(__dirname, 'services.json');
if (fs.existsSync(servicesFile)) {
return JSON.parse(fs.readFileSync(servicesFile, 'utf8'));
}
} catch (error) {
console.error('[BackupManager] Error backing up services:', error.message);
}
return null;
}
/**
* Backup configuration files
*/
backupConfig() {
try {
const configFile = process.env.CONFIG_FILE || path.join(__dirname, 'config.json');
if (fs.existsSync(configFile)) {
return JSON.parse(fs.readFileSync(configFile, 'utf8'));
}
} catch (error) {
console.error('[BackupManager] Error backing up config:', error.message);
}
return null;
}
/**
* Backup credentials (encrypted)
*/
backupCredentials() {
try {
const credentialManager = require('./credential-manager');
return credentialManager.exportBackup();
} catch (error) {
console.error('[BackupManager] Error backing up credentials:', error.message);
}
return null;
}
/**
* Backup resource stats
*/
backupStats() {
try {
const resourceMonitor = require('./resource-monitor');
return resourceMonitor.exportStats();
} catch (error) {
console.error('[BackupManager] Error backing up stats:', error.message);
}
return null;
}
/**
* Backup Docker volumes
* Creates tar archives of Docker volumes for backup
* @returns {Object|null} Volume backup metadata or null on failure
*/
async backupVolumes() {
try {
const Docker = require('dockerode');
const docker = new Docker();
// Get list of all volumes
const volumeData = await docker.listVolumes();
const volumes = volumeData.Volumes || [];
if (volumes.length === 0) {
return { volumes: [], message: 'No volumes found' };
}
const backupDir = path.join(DEFAULT_BACKUP_DIR, 'volumes');
if (!fs.existsSync(backupDir)) {
fs.mkdirSync(backupDir, { recursive: true });
}
const timestamp = Date.now();
const backupResults = [];
for (const volume of volumes) {
try {
const volumeName = volume.Name;
const backupFile = path.join(backupDir, `${volumeName}-${timestamp}.tar.gz`);
// Create a temporary container to backup the volume
// Using alpine with tar to create the archive
const container = await docker.createContainer({
Image: 'alpine:latest',
Cmd: ['tar', 'czf', '/backup/volume.tar.gz', '-C', '/volume', '.'],
HostConfig: {
Binds: [
`${volumeName}:/volume:ro`,
`${backupDir}:/backup`
],
AutoRemove: true
}
});
// Start and wait for completion
await container.start();
await container.wait();
// Rename the backup file to include volume name
const tempFile = path.join(backupDir, 'volume.tar.gz');
if (fs.existsSync(tempFile)) {
fs.renameSync(tempFile, backupFile);
const stats = fs.statSync(backupFile);
backupResults.push({
name: volumeName,
driver: volume.Driver,
path: backupFile,
size: stats.size,
timestamp: new Date().toISOString(),
status: 'success'
});
}
} catch (volumeError) {
console.error(`[BackupManager] Error backing up volume ${volume.Name}:`, volumeError.message);
backupResults.push({
name: volume.Name,
status: 'failed',
error: volumeError.message
});
}
}
return {
timestamp: new Date().toISOString(),
totalVolumes: volumes.length,
successCount: backupResults.filter(r => r.status === 'success').length,
volumes: backupResults
};
} catch (error) {
console.error('[BackupManager] Error backing up volumes:', error.message);
return null;
}
}
/**
* Restore Docker volumes from backup
* @param {Object} volumeBackup - Volume backup metadata from backupVolumes()
* @returns {Object} Restore results
*/
async restoreVolumes(volumeBackup) {
if (!volumeBackup || !volumeBackup.volumes) {
throw new Error('Invalid volume backup data');
}
const Docker = require('dockerode');
const docker = new Docker();
const restoreResults = [];
for (const volBackup of volumeBackup.volumes) {
if (volBackup.status !== 'success' || !volBackup.path) {
continue;
}
try {
// Check if backup file exists
if (!fs.existsSync(volBackup.path)) {
throw new Error(`Backup file not found: ${volBackup.path}`);
}
const volumeName = volBackup.name;
const backupDir = path.dirname(volBackup.path);
// Create volume if it doesn't exist
try {
await docker.createVolume({ Name: volumeName });
} catch (e) {
// Volume might already exist, that's OK
}
// Copy backup file to a temp name for mounting
const tempBackupFile = path.join(backupDir, 'restore-volume.tar.gz');
fs.copyFileSync(volBackup.path, tempBackupFile);
// Create container to restore the volume
const container = await docker.createContainer({
Image: 'alpine:latest',
Cmd: ['sh', '-c', 'rm -rf /volume/* && tar xzf /backup/restore-volume.tar.gz -C /volume'],
HostConfig: {
Binds: [
`${volumeName}:/volume`,
`${backupDir}:/backup:ro`
],
AutoRemove: true
}
});
await container.start();
await container.wait();
// Clean up temp file
if (fs.existsSync(tempBackupFile)) {
fs.unlinkSync(tempBackupFile);
}
restoreResults.push({
name: volumeName,
status: 'success',
timestamp: new Date().toISOString()
});
console.log(`[BackupManager] Volume ${volumeName} restored successfully`);
} catch (restoreError) {
console.error(`[BackupManager] Error restoring volume ${volBackup.name}:`, restoreError.message);
restoreResults.push({
name: volBackup.name,
status: 'failed',
error: restoreError.message
});
}
}
return {
timestamp: new Date().toISOString(),
results: restoreResults,
successCount: restoreResults.filter(r => r.status === 'success').length,
failedCount: restoreResults.filter(r => r.status === 'failed').length
};
}
/**
* Compress backup data
*/
async compressBackup(data) {
const zlib = require('zlib');
const json = JSON.stringify(data);
return zlib.gzipSync(json);
}
/**
* Decompress backup data
*/
async decompressBackup(compressed) {
const zlib = require('zlib');
const json = zlib.gunzipSync(compressed).toString();
return JSON.parse(json);
}
/**
* Encrypt backup data
*/
async encryptBackup(data, key) {
const algorithm = 'aes-256-gcm';
const keyBuffer = Buffer.from(key, 'hex');
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(algorithm, keyBuffer, iv);
let encrypted = cipher.update(data);
encrypted = Buffer.concat([encrypted, cipher.final()]);
const authTag = cipher.getAuthTag();
// Return: iv:authTag:encrypted (all base64)
return Buffer.from(
iv.toString('base64') + ':' + authTag.toString('base64') + ':' + encrypted.toString('base64')
);
}
/**
* Decrypt backup data
*/
async decryptBackup(encrypted, key) {
const algorithm = 'aes-256-gcm';
const keyBuffer = Buffer.from(key, 'hex');
// Parse format: iv:authTag:encrypted
const parts = encrypted.toString().split(':');
if (parts.length < 3) {
throw new Error('Invalid encrypted backup format');
}
const iv = Buffer.from(parts[0], 'base64');
const authTag = Buffer.from(parts[1], 'base64');
const ciphertext = Buffer.from(parts.slice(2).join(':'), 'base64');
const decipher = crypto.createDecipheriv(algorithm, keyBuffer, iv);
decipher.setAuthTag(authTag);
let decrypted = decipher.update(ciphertext);
decrypted = Buffer.concat([decrypted, decipher.final()]);
return decrypted;
}
/**
* Calculate checksum for backup
*/
calculateChecksum(data) {
return crypto.createHash('sha256').update(data).digest('hex');
}
/**
* Save backup to destination
*/
async saveToDestination(data, destination, backupId) {
switch (destination.type) {
case 'local':
return await this.saveToLocal(data, destination, backupId);
case 'dropbox':
return await this.saveToDropbox(data, destination, backupId);
case 'webdav':
return await this.saveToWebDAV(data, destination, backupId);
case 'sftp':
return await this.saveToSFTP(data, destination, backupId);
default:
throw new Error(`Unsupported destination type: ${destination.type}`);
}
}
/**
* Load encrypted backup blob from a destination location.
* Returns a Buffer that can be passed to decryptBackup/decompressBackup.
*/
async loadFromDestination(location) {
switch (location.type) {
case 'local':
return fs.readFileSync(location.path);
case 'dropbox':
return await this.loadFromDropbox(location);
case 'webdav':
return await this.loadFromWebDAV(location);
case 'sftp':
return await this.loadFromSFTP(location);
default:
throw new Error(`Unsupported destination type: ${location.type}`);
}
}
/**
* Save to local filesystem
*/
async saveToLocal(data, destination, backupId) {
const backupDir = destination.path || DEFAULT_BACKUP_DIR;
// Ensure directory exists
if (!fs.existsSync(backupDir)) {
fs.mkdirSync(backupDir, { recursive: true });
}
const filename = `${backupId}.backup`;
const filepath = path.join(backupDir, filename);
fs.writeFileSync(filepath, data);
return {
type: 'local',
path: filepath,
size: data.length
};
}
// ==================== CLOUD DESTINATIONS ====================
/**
* Resolve credentials for a given provider via the credentialManager.
* Throws if required fields are missing.
*/
async _getCloudCredentials(provider) {
const credentialManager = require('./credential-manager');
const creds = {};
if (provider === 'dropbox') {
creds.token = await credentialManager.retrieve('backup.dropbox.token');
if (!creds.token) throw new Error('Dropbox token not configured');
} else if (provider === 'webdav') {
creds.url = await credentialManager.retrieve('backup.webdav.url');
creds.username = await credentialManager.retrieve('backup.webdav.username');
creds.password = await credentialManager.retrieve('backup.webdav.password');
if (!creds.url || !creds.username || !creds.password) {
throw new Error('WebDAV credentials incomplete (need url, username, password)');
}
} else if (provider === 'sftp') {
creds.host = await credentialManager.retrieve('backup.sftp.host');
const portStr = await credentialManager.retrieve('backup.sftp.port');
creds.port = parseInt(portStr || '22', 10);
creds.username = await credentialManager.retrieve('backup.sftp.username');
creds.password = await credentialManager.retrieve('backup.sftp.password');
creds.privateKey = await credentialManager.retrieve('backup.sftp.privateKey');
if (!creds.host || !creds.username || (!creds.password && !creds.privateKey)) {
throw new Error('SFTP credentials incomplete (need host, username, and either password or privateKey)');
}
}
return creds;
}
// ----- Dropbox -----
async saveToDropbox(data, destination, backupId) {
const { Dropbox } = require('dropbox');
const creds = await this._getCloudCredentials('dropbox');
const dbx = new Dropbox({ accessToken: creds.token });
const folder = (destination.path || '/dashcaddy-backups').replace(/\/+$/, '');
const remotePath = `${folder}/${backupId}.backup`;
await dbx.filesUpload({
path: remotePath,
contents: data,
mode: { '.tag': 'overwrite' },
autorename: false,
mute: true
});
return {
type: 'dropbox',
path: remotePath,
size: data.length
};
}
async loadFromDropbox(location) {
const { Dropbox } = require('dropbox');
const creds = await this._getCloudCredentials('dropbox');
const dbx = new Dropbox({ accessToken: creds.token });
const result = await dbx.filesDownload({ path: location.path });
// Node SDK returns fileBinary on the result
const fileBinary = result.result.fileBinary || result.result.fileBlob;
if (Buffer.isBuffer(fileBinary)) return fileBinary;
return Buffer.from(fileBinary);
}
// ----- WebDAV -----
async saveToWebDAV(data, destination, backupId) {
const { createClient } = require('webdav');
const creds = await this._getCloudCredentials('webdav');
const client = createClient(creds.url, {
username: creds.username,
password: creds.password
});
const folder = (destination.path || '/dashcaddy-backups').replace(/\/+$/, '');
// Ensure folder exists
try {
const exists = await client.exists(folder);
if (!exists) await client.createDirectory(folder, { recursive: true });
} catch (_) {
// best-effort
}
const remotePath = `${folder}/${backupId}.backup`;
await client.putFileContents(remotePath, data, { overwrite: true });
return {
type: 'webdav',
path: remotePath,
size: data.length
};
}
async loadFromWebDAV(location) {
const { createClient } = require('webdav');
const creds = await this._getCloudCredentials('webdav');
const client = createClient(creds.url, {
username: creds.username,
password: creds.password
});
const data = await client.getFileContents(location.path);
return Buffer.isBuffer(data) ? data : Buffer.from(data);
}
// ----- SFTP -----
async saveToSFTP(data, destination, backupId) {
const SftpClient = require('ssh2-sftp-client');
const creds = await this._getCloudCredentials('sftp');
const client = new SftpClient();
try {
await client.connect({
host: creds.host,
port: creds.port,
username: creds.username,
password: creds.password || undefined,
privateKey: creds.privateKey || undefined
});
const folder = (destination.path || '/dashcaddy-backups').replace(/\/+$/, '');
// Ensure remote dir exists
try {
const exists = await client.exists(folder);
if (!exists) await client.mkdir(folder, true);
} catch (_) {
// best-effort
}
const remotePath = `${folder}/${backupId}.backup`;
await client.put(Buffer.from(data), remotePath);
return {
type: 'sftp',
path: remotePath,
size: data.length
};
} finally {
try { await client.end(); } catch (_) {}
}
}
async loadFromSFTP(location) {
const SftpClient = require('ssh2-sftp-client');
const creds = await this._getCloudCredentials('sftp');
const client = new SftpClient();
try {
await client.connect({
host: creds.host,
port: creds.port,
username: creds.username,
password: creds.password || undefined,
privateKey: creds.privateKey || undefined
});
const buffer = await client.get(location.path);
return Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer);
} finally {
try { await client.end(); } catch (_) {}
}
}
/**
* Test that a destination is reachable + writable + deletable.
* Performs a small write/read/delete probe.
*/
async testDestination(destination) {
const probeId = `test-${Date.now()}`;
const probeData = Buffer.from(`dashcaddy-test-${probeId}`);
const start = Date.now();
try {
const location = await this.saveToDestination(probeData, destination, probeId);
// Read it back
let readBack = null;
try {
readBack = await this.loadFromDestination(location);
} catch (_) {
// Some providers (e.g. local) we already trust the file system; skip
}
// Delete the probe
try {
await this._deleteFromDestination(location);
} catch (_) {}
const elapsed = Date.now() - start;
return {
success: true,
type: destination.type,
elapsedMs: elapsed,
verified: readBack ? readBack.equals(probeData) : null
};
} catch (error) {
return {
success: false,
type: destination.type,
error: error.message,
elapsedMs: Date.now() - start
};
}
}
/**
* Delete a backup from a destination location
*/
async _deleteFromDestination(location) {
if (location.type === 'local') {
if (fs.existsSync(location.path)) fs.unlinkSync(location.path);
return;
}
if (location.type === 'dropbox') {
const { Dropbox } = require('dropbox');
const creds = await this._getCloudCredentials('dropbox');
const dbx = new Dropbox({ accessToken: creds.token });
try { await dbx.filesDeleteV2({ path: location.path }); } catch (_) {}
return;
}
if (location.type === 'webdav') {
const { createClient } = require('webdav');
const creds = await this._getCloudCredentials('webdav');
const client = createClient(creds.url, { username: creds.username, password: creds.password });
try { await client.deleteFile(location.path); } catch (_) {}
return;
}
if (location.type === 'sftp') {
const SftpClient = require('ssh2-sftp-client');
const creds = await this._getCloudCredentials('sftp');
const client = new SftpClient();
try {
await client.connect({
host: creds.host,
port: creds.port,
username: creds.username,
password: creds.password || undefined,
privateKey: creds.privateKey || undefined
});
try { await client.delete(location.path); } catch (_) {}
} finally {
try { await client.end(); } catch (_) {}
}
return;
}
}
/**
* Verify backup integrity
*/
async verifyBackup(location, expectedChecksum) {
if (location.type === 'local') {
const data = fs.readFileSync(location.path);
const checksum = this.calculateChecksum(data);
if (checksum !== expectedChecksum) {
throw new Error('Backup verification failed: checksum mismatch');
}
console.log('[BackupManager] Backup verified successfully');
return true;
}
return true;
}
/**
* Restore from backup
*/
async restoreBackup(backupId, options = {}) {
console.log(`[BackupManager] Starting restore from backup: ${backupId}`);
this.emit('restore-start', { backupId, timestamp: new Date().toISOString() });
try {
// Find backup in history
const backup = this.history.find(b => b.id === backupId);
if (!backup) {
throw new Error(`Backup not found: ${backupId}`);
}
// Load backup data — try each destination location until one succeeds
const location = backup.locations[0]; // Primary location
let data;
try {
data = await this.loadFromDestination(location);
} catch (loadErr) {
// Fall back to other locations if available
let recovered = false;
for (let i = 1; i < backup.locations.length; i++) {
try {
data = await this.loadFromDestination(backup.locations[i]);
recovered = true;
console.log(`[BackupManager] Loaded backup from fallback location ${backup.locations[i].type}`);
break;
} catch (_) {}
}
if (!recovered) throw loadErr;
}
// Decrypt if needed
if (backup.encrypted && options.encryptionKey) {
data = await this.decryptBackup(data, options.encryptionKey);
}
// Decompress
const backupData = await this.decompressBackup(data);
// Verify version compatibility
if (backupData.version !== '1.0') {
throw new Error(`Unsupported backup version: ${backupData.version}`);
}
// Restore data
const restored = {};
if (backupData.data.services && options.restoreServices !== false) {
this.restoreServices(backupData.data.services);
restored.services = true;
}
if (backupData.data.config && options.restoreConfig !== false) {
this.restoreConfig(backupData.data.config);
restored.config = true;
}
if (backupData.data.credentials && options.restoreCredentials !== false) {
this.restoreCredentials(backupData.data.credentials);
restored.credentials = true;
}
if (backupData.data.stats && options.restoreStats !== false) {
this.restoreStats(backupData.data.stats);
restored.stats = true;
}
if (backupData.data.volumes && options.restoreVolumes !== false) {
const volumeResult = await this.restoreVolumes(backupData.data.volumes);
restored.volumes = volumeResult;
}
this.emit('restore-complete', {
backupId,
restored,
timestamp: new Date().toISOString()
});
console.log('[BackupManager] Restore completed successfully');
return { success: true, restored };
} catch (error) {
this.emit('restore-failed', {
backupId,
error: error.message,
timestamp: new Date().toISOString()
});
throw error;
}
}
/**
* Restore services configuration
*/
restoreServices(services) {
const servicesFile = process.env.SERVICES_FILE || path.join(__dirname, 'services.json');
fs.writeFileSync(servicesFile, JSON.stringify(services, null, 2));
console.log('[BackupManager] Services restored');
}
/**
* Restore configuration
*/
restoreConfig(config) {
const configFile = process.env.CONFIG_FILE || path.join(__dirname, 'config.json');
fs.writeFileSync(configFile, JSON.stringify(config, null, 2));
console.log('[BackupManager] Config restored');
}
/**
* Restore credentials
*/
restoreCredentials(credentials) {
const credentialManager = require('./credential-manager');
credentialManager.importBackup(credentials);
console.log('[BackupManager] Credentials restored');
}
/**
* Restore stats
*/
restoreStats(stats) {
const resourceMonitor = require('./resource-monitor');
resourceMonitor.importStats(stats);
console.log('[BackupManager] Stats restored');
}
/**
* Cleanup old backups based on retention policy
*/
async cleanupOldBackups(name, retention) {
const backups = this.history.filter(b => b.name === name && b.status === 'success');
// Sort by timestamp (newest first)
backups.sort((a, b) => new Date(b.timestamp) - new Date(a.timestamp));
// Keep only the specified number of backups
const toDelete = backups.slice(retention.keep || 7);
for (const backup of toDelete) {
try {
// Delete from all locations (local + cloud)
for (const location of backup.locations) {
try {
await this._deleteFromDestination(location);
} catch (delErr) {
console.warn(`[BackupManager] Could not delete ${location.type} location for ${backup.id}:`, delErr.message);
}
}
// Remove from history
this.history = this.history.filter(b => b.id !== backup.id);
console.log(`[BackupManager] Deleted old backup: ${backup.id}`);
} catch (error) {
console.error(`[BackupManager] Error deleting backup ${backup.id}:`, error.message);
}
}
this.saveHistory();
}
/**
* Add entry to backup history
*/
addToHistory(entry) {
this.history.push(entry);
// Keep only last 100 entries
if (this.history.length > 100) {
this.history = this.history.slice(-100);
}
this.saveHistory();
}
/**
* Get backup history
*/
getHistory(limit = 50) {
return this.history.slice(-limit).reverse();
}
/**
* Get backup configuration
*/
getConfig() {
return this.config;
}
/**
* Update backup configuration
*/
updateConfig(config) {
this.config = { ...this.config, ...config };
this.saveConfig();
// Restart scheduler to apply changes
this.stop();
this.start();
}
/**
* Load configuration from disk
*/
loadConfig() {
try {
if (fs.existsSync(BACKUP_CONFIG_FILE)) {
return JSON.parse(fs.readFileSync(BACKUP_CONFIG_FILE, 'utf8'));
}
} catch (error) {
console.error('[BackupManager] Error loading config:', error.message);
}
return {
backups: {},
defaultRetention: { keep: 7 }
};
}
/**
* Save configuration to disk
*/
saveConfig() {
try {
fs.writeFileSync(BACKUP_CONFIG_FILE, JSON.stringify(this.config, null, 2));
} catch (error) {
console.error('[BackupManager] Error saving config:', error.message);
}
}
/**
* Load history from disk
*/
loadHistory() {
try {
if (fs.existsSync(BACKUP_HISTORY_FILE)) {
return JSON.parse(fs.readFileSync(BACKUP_HISTORY_FILE, 'utf8'));
}
} catch (error) {
console.error('[BackupManager] Error loading history:', error.message);
}
return [];
}
/**
* Save history to disk
*/
saveHistory() {
try {
fs.writeFileSync(BACKUP_HISTORY_FILE, JSON.stringify(this.history, null, 2));
} catch (error) {
console.error('[BackupManager] Error saving history:', error.message);
}
}
}
// Export a shared singleton so every consumer observes the same
// scheduler state, configuration, and history.
module.exports = new BackupManager();