Initial commit: DashCaddy v1.0
Full codebase including API server (32 modules + routes), dashboard frontend, DashCA certificate distribution, installer script, and deployment skills.
This commit is contained in:
835
dashcaddy-api/backup-manager.js
Normal file
835
dashcaddy-api/backup-manager.js
Normal file
@@ -0,0 +1,835 @@
|
||||
/**
|
||||
* Automated Backup & Restore Manager
|
||||
* Handles scheduled backups with local storage
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { execSync } = require('child_process');
|
||||
const crypto = require('crypto');
|
||||
const EventEmitter = require('events');
|
||||
|
||||
const BACKUP_CONFIG_FILE = process.env.BACKUP_CONFIG_FILE || path.join(__dirname, 'backup-config.json');
|
||||
const BACKUP_HISTORY_FILE = process.env.BACKUP_HISTORY_FILE || path.join(__dirname, 'backup-history.json');
|
||||
const DEFAULT_BACKUP_DIR = process.env.BACKUP_DIR || path.join(__dirname, 'backups');
|
||||
|
||||
/**
 * Manages scheduled backups: collection, compression, optional encryption,
 * local storage, verification, retention cleanup, and restore.
 * Emits: 'backup-start', 'backup-complete', 'backup-failed',
 * 'restore-start', 'restore-complete', 'restore-failed'.
 */
class BackupManager extends EventEmitter {
  constructor() {
    super();
    // Persistent state, loaded from BACKUP_CONFIG_FILE / BACKUP_HISTORY_FILE.
    this.config = this.loadConfig();   // { backups: {name: def}, defaultRetention: {...} }
    this.history = this.loadHistory(); // array of backup history entries
    this.scheduledJobs = new Map();    // backup name -> setInterval handle
    this.running = false;              // scheduler on/off flag
  }
|
||||
|
||||
/**
|
||||
* Start backup scheduler
|
||||
*/
|
||||
start() {
|
||||
if (this.running) return;
|
||||
|
||||
console.log('[BackupManager] Starting backup scheduler');
|
||||
this.running = true;
|
||||
|
||||
// Schedule all configured backups
|
||||
for (const [name, backup] of Object.entries(this.config.backups || {})) {
|
||||
if (backup.enabled && backup.schedule) {
|
||||
this.scheduleBackup(name, backup);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop backup scheduler
|
||||
*/
|
||||
stop() {
|
||||
if (!this.running) return;
|
||||
|
||||
console.log('[BackupManager] Stopping backup scheduler');
|
||||
this.running = false;
|
||||
|
||||
// Clear all scheduled jobs
|
||||
for (const [name, job] of this.scheduledJobs.entries()) {
|
||||
clearInterval(job);
|
||||
}
|
||||
this.scheduledJobs.clear();
|
||||
}
|
||||
|
||||
/**
|
||||
* Schedule a backup job
|
||||
*/
|
||||
scheduleBackup(name, backup) {
|
||||
// Parse schedule (cron-like: daily, weekly, monthly, or interval in minutes)
|
||||
let intervalMs;
|
||||
|
||||
switch (backup.schedule) {
|
||||
case 'hourly':
|
||||
intervalMs = 60 * 60 * 1000;
|
||||
break;
|
||||
case 'daily':
|
||||
intervalMs = 24 * 60 * 60 * 1000;
|
||||
break;
|
||||
case 'weekly':
|
||||
intervalMs = 7 * 24 * 60 * 60 * 1000;
|
||||
break;
|
||||
case 'monthly':
|
||||
intervalMs = 30 * 24 * 60 * 60 * 1000;
|
||||
break;
|
||||
default:
|
||||
// Custom interval in minutes
|
||||
const minutes = parseInt(backup.schedule, 10);
|
||||
if (!isNaN(minutes) && minutes > 0) {
|
||||
intervalMs = minutes * 60 * 1000;
|
||||
} else {
|
||||
console.error(`[BackupManager] Invalid schedule for ${name}: ${backup.schedule}`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Schedule the job
|
||||
const job = setInterval(() => {
|
||||
this.executeBackup(name, backup).catch(error => {
|
||||
console.error(`[BackupManager] Scheduled backup ${name} failed:`, error.message);
|
||||
});
|
||||
}, intervalMs);
|
||||
|
||||
this.scheduledJobs.set(name, job);
|
||||
console.log(`[BackupManager] Scheduled backup '${name}' every ${backup.schedule}`);
|
||||
|
||||
// Run immediately if configured
|
||||
if (backup.runImmediately) {
|
||||
this.executeBackup(name, backup).catch(error => {
|
||||
console.error(`[BackupManager] Initial backup ${name} failed:`, error.message);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a backup
|
||||
*/
|
||||
async executeBackup(name, backup) {
|
||||
const startTime = Date.now();
|
||||
const backupId = `${name}-${Date.now()}`;
|
||||
|
||||
console.log(`[BackupManager] Starting backup: ${name}`);
|
||||
|
||||
this.emit('backup-start', { name, backupId, timestamp: new Date().toISOString() });
|
||||
|
||||
try {
|
||||
// Create backup data
|
||||
const backupData = await this.createBackupData(backup.include || ['all']);
|
||||
|
||||
// Compress backup
|
||||
const compressed = await this.compressBackup(backupData);
|
||||
|
||||
// Encrypt if configured
|
||||
let finalData = compressed;
|
||||
if (backup.encrypt && backup.encryptionKey) {
|
||||
finalData = await this.encryptBackup(compressed, backup.encryptionKey);
|
||||
}
|
||||
|
||||
// Calculate checksum
|
||||
const checksum = this.calculateChecksum(finalData);
|
||||
|
||||
// Save to destinations
|
||||
const destinations = backup.destinations || [{ type: 'local' }];
|
||||
const savedLocations = [];
|
||||
|
||||
for (const dest of destinations) {
|
||||
try {
|
||||
const location = await this.saveToDestination(finalData, dest, backupId);
|
||||
savedLocations.push(location);
|
||||
} catch (error) {
|
||||
console.error(`[BackupManager] Failed to save to ${dest.type}:`, error.message);
|
||||
}
|
||||
}
|
||||
|
||||
if (savedLocations.length === 0) {
|
||||
throw new Error('Failed to save backup to any destination');
|
||||
}
|
||||
|
||||
// Verify backup
|
||||
if (backup.verify !== false) {
|
||||
await this.verifyBackup(savedLocations[0], checksum);
|
||||
}
|
||||
|
||||
// Record in history
|
||||
const duration = Date.now() - startTime;
|
||||
const historyEntry = {
|
||||
id: backupId,
|
||||
name,
|
||||
timestamp: new Date().toISOString(),
|
||||
duration,
|
||||
size: finalData.length,
|
||||
checksum,
|
||||
locations: savedLocations,
|
||||
encrypted: !!backup.encrypt,
|
||||
compressed: true,
|
||||
status: 'success'
|
||||
};
|
||||
|
||||
this.addToHistory(historyEntry);
|
||||
|
||||
// Cleanup old backups
|
||||
if (backup.retention) {
|
||||
await this.cleanupOldBackups(name, backup.retention);
|
||||
}
|
||||
|
||||
this.emit('backup-complete', historyEntry);
|
||||
console.log(`[BackupManager] Backup ${name} completed in ${duration}ms`);
|
||||
|
||||
return historyEntry;
|
||||
} catch (error) {
|
||||
const duration = Date.now() - startTime;
|
||||
const historyEntry = {
|
||||
id: backupId,
|
||||
name,
|
||||
timestamp: new Date().toISOString(),
|
||||
duration,
|
||||
status: 'failed',
|
||||
error: error.message
|
||||
};
|
||||
|
||||
this.addToHistory(historyEntry);
|
||||
this.emit('backup-failed', historyEntry);
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create backup data from specified sources
|
||||
*/
|
||||
async createBackupData(include) {
|
||||
const data = {
|
||||
version: '1.0',
|
||||
timestamp: new Date().toISOString(),
|
||||
hostname: require('os').hostname(),
|
||||
data: {}
|
||||
};
|
||||
|
||||
for (const source of include) {
|
||||
switch (source) {
|
||||
case 'all':
|
||||
data.data.services = this.backupServices();
|
||||
data.data.config = this.backupConfig();
|
||||
data.data.credentials = this.backupCredentials();
|
||||
data.data.stats = this.backupStats();
|
||||
break;
|
||||
case 'services':
|
||||
data.data.services = this.backupServices();
|
||||
break;
|
||||
case 'config':
|
||||
data.data.config = this.backupConfig();
|
||||
break;
|
||||
case 'credentials':
|
||||
data.data.credentials = this.backupCredentials();
|
||||
break;
|
||||
case 'stats':
|
||||
data.data.stats = this.backupStats();
|
||||
break;
|
||||
case 'volumes':
|
||||
data.data.volumes = await this.backupVolumes();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Backup services configuration
|
||||
*/
|
||||
backupServices() {
|
||||
try {
|
||||
const servicesFile = process.env.SERVICES_FILE || path.join(__dirname, 'services.json');
|
||||
if (fs.existsSync(servicesFile)) {
|
||||
return JSON.parse(fs.readFileSync(servicesFile, 'utf8'));
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('[BackupManager] Error backing up services:', error.message);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Backup configuration files
|
||||
*/
|
||||
backupConfig() {
|
||||
try {
|
||||
const configFile = process.env.CONFIG_FILE || path.join(__dirname, 'config.json');
|
||||
if (fs.existsSync(configFile)) {
|
||||
return JSON.parse(fs.readFileSync(configFile, 'utf8'));
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('[BackupManager] Error backing up config:', error.message);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Backup credentials (encrypted)
|
||||
*/
|
||||
backupCredentials() {
|
||||
try {
|
||||
const credentialManager = require('./credential-manager');
|
||||
return credentialManager.exportBackup();
|
||||
} catch (error) {
|
||||
console.error('[BackupManager] Error backing up credentials:', error.message);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Backup resource stats
|
||||
*/
|
||||
backupStats() {
|
||||
try {
|
||||
const resourceMonitor = require('./resource-monitor');
|
||||
return resourceMonitor.exportStats();
|
||||
} catch (error) {
|
||||
console.error('[BackupManager] Error backing up stats:', error.message);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Backup Docker volumes
|
||||
* Creates tar archives of Docker volumes for backup
|
||||
* @returns {Object|null} Volume backup metadata or null on failure
|
||||
*/
|
||||
async backupVolumes() {
|
||||
try {
|
||||
const Docker = require('dockerode');
|
||||
const docker = new Docker();
|
||||
|
||||
// Get list of all volumes
|
||||
const volumeData = await docker.listVolumes();
|
||||
const volumes = volumeData.Volumes || [];
|
||||
|
||||
if (volumes.length === 0) {
|
||||
return { volumes: [], message: 'No volumes found' };
|
||||
}
|
||||
|
||||
const backupDir = path.join(DEFAULT_BACKUP_DIR, 'volumes');
|
||||
if (!fs.existsSync(backupDir)) {
|
||||
fs.mkdirSync(backupDir, { recursive: true });
|
||||
}
|
||||
|
||||
const timestamp = Date.now();
|
||||
const backupResults = [];
|
||||
|
||||
for (const volume of volumes) {
|
||||
try {
|
||||
const volumeName = volume.Name;
|
||||
const backupFile = path.join(backupDir, `${volumeName}-${timestamp}.tar.gz`);
|
||||
|
||||
// Create a temporary container to backup the volume
|
||||
// Using alpine with tar to create the archive
|
||||
const container = await docker.createContainer({
|
||||
Image: 'alpine:latest',
|
||||
Cmd: ['tar', 'czf', '/backup/volume.tar.gz', '-C', '/volume', '.'],
|
||||
HostConfig: {
|
||||
Binds: [
|
||||
`${volumeName}:/volume:ro`,
|
||||
`${backupDir}:/backup`
|
||||
],
|
||||
AutoRemove: true
|
||||
}
|
||||
});
|
||||
|
||||
// Start and wait for completion
|
||||
await container.start();
|
||||
await container.wait();
|
||||
|
||||
// Rename the backup file to include volume name
|
||||
const tempFile = path.join(backupDir, 'volume.tar.gz');
|
||||
if (fs.existsSync(tempFile)) {
|
||||
fs.renameSync(tempFile, backupFile);
|
||||
|
||||
const stats = fs.statSync(backupFile);
|
||||
backupResults.push({
|
||||
name: volumeName,
|
||||
driver: volume.Driver,
|
||||
path: backupFile,
|
||||
size: stats.size,
|
||||
timestamp: new Date().toISOString(),
|
||||
status: 'success'
|
||||
});
|
||||
}
|
||||
} catch (volumeError) {
|
||||
console.error(`[BackupManager] Error backing up volume ${volume.Name}:`, volumeError.message);
|
||||
backupResults.push({
|
||||
name: volume.Name,
|
||||
status: 'failed',
|
||||
error: volumeError.message
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
timestamp: new Date().toISOString(),
|
||||
totalVolumes: volumes.length,
|
||||
successCount: backupResults.filter(r => r.status === 'success').length,
|
||||
volumes: backupResults
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('[BackupManager] Error backing up volumes:', error.message);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore Docker volumes from backup
|
||||
* @param {Object} volumeBackup - Volume backup metadata from backupVolumes()
|
||||
* @returns {Object} Restore results
|
||||
*/
|
||||
async restoreVolumes(volumeBackup) {
|
||||
if (!volumeBackup || !volumeBackup.volumes) {
|
||||
throw new Error('Invalid volume backup data');
|
||||
}
|
||||
|
||||
const Docker = require('dockerode');
|
||||
const docker = new Docker();
|
||||
const restoreResults = [];
|
||||
|
||||
for (const volBackup of volumeBackup.volumes) {
|
||||
if (volBackup.status !== 'success' || !volBackup.path) {
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
// Check if backup file exists
|
||||
if (!fs.existsSync(volBackup.path)) {
|
||||
throw new Error(`Backup file not found: ${volBackup.path}`);
|
||||
}
|
||||
|
||||
const volumeName = volBackup.name;
|
||||
const backupDir = path.dirname(volBackup.path);
|
||||
|
||||
// Create volume if it doesn't exist
|
||||
try {
|
||||
await docker.createVolume({ Name: volumeName });
|
||||
} catch (e) {
|
||||
// Volume might already exist, that's OK
|
||||
}
|
||||
|
||||
// Copy backup file to a temp name for mounting
|
||||
const tempBackupFile = path.join(backupDir, 'restore-volume.tar.gz');
|
||||
fs.copyFileSync(volBackup.path, tempBackupFile);
|
||||
|
||||
// Create container to restore the volume
|
||||
const container = await docker.createContainer({
|
||||
Image: 'alpine:latest',
|
||||
Cmd: ['sh', '-c', 'rm -rf /volume/* && tar xzf /backup/restore-volume.tar.gz -C /volume'],
|
||||
HostConfig: {
|
||||
Binds: [
|
||||
`${volumeName}:/volume`,
|
||||
`${backupDir}:/backup:ro`
|
||||
],
|
||||
AutoRemove: true
|
||||
}
|
||||
});
|
||||
|
||||
await container.start();
|
||||
await container.wait();
|
||||
|
||||
// Clean up temp file
|
||||
if (fs.existsSync(tempBackupFile)) {
|
||||
fs.unlinkSync(tempBackupFile);
|
||||
}
|
||||
|
||||
restoreResults.push({
|
||||
name: volumeName,
|
||||
status: 'success',
|
||||
timestamp: new Date().toISOString()
|
||||
});
|
||||
|
||||
console.log(`[BackupManager] Volume ${volumeName} restored successfully`);
|
||||
} catch (restoreError) {
|
||||
console.error(`[BackupManager] Error restoring volume ${volBackup.name}:`, restoreError.message);
|
||||
restoreResults.push({
|
||||
name: volBackup.name,
|
||||
status: 'failed',
|
||||
error: restoreError.message
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
timestamp: new Date().toISOString(),
|
||||
results: restoreResults,
|
||||
successCount: restoreResults.filter(r => r.status === 'success').length,
|
||||
failedCount: restoreResults.filter(r => r.status === 'failed').length
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Compress backup data
|
||||
*/
|
||||
async compressBackup(data) {
|
||||
const zlib = require('zlib');
|
||||
const json = JSON.stringify(data);
|
||||
return zlib.gzipSync(json);
|
||||
}
|
||||
|
||||
/**
|
||||
* Decompress backup data
|
||||
*/
|
||||
async decompressBackup(compressed) {
|
||||
const zlib = require('zlib');
|
||||
const json = zlib.gunzipSync(compressed).toString();
|
||||
return JSON.parse(json);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypt backup data
|
||||
*/
|
||||
async encryptBackup(data, key) {
|
||||
const algorithm = 'aes-256-gcm';
|
||||
const keyBuffer = Buffer.from(key, 'hex');
|
||||
const iv = crypto.randomBytes(16);
|
||||
|
||||
const cipher = crypto.createCipheriv(algorithm, keyBuffer, iv);
|
||||
let encrypted = cipher.update(data);
|
||||
encrypted = Buffer.concat([encrypted, cipher.final()]);
|
||||
|
||||
const authTag = cipher.getAuthTag();
|
||||
|
||||
// Return: iv:authTag:encrypted (all base64)
|
||||
return Buffer.from(
|
||||
iv.toString('base64') + ':' + authTag.toString('base64') + ':' + encrypted.toString('base64')
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Decrypt backup data
|
||||
*/
|
||||
async decryptBackup(encrypted, key) {
|
||||
const algorithm = 'aes-256-gcm';
|
||||
const keyBuffer = Buffer.from(key, 'hex');
|
||||
|
||||
// Parse format: iv:authTag:encrypted
|
||||
const parts = encrypted.toString().split(':');
|
||||
if (parts.length < 3) {
|
||||
throw new Error('Invalid encrypted backup format');
|
||||
}
|
||||
|
||||
const iv = Buffer.from(parts[0], 'base64');
|
||||
const authTag = Buffer.from(parts[1], 'base64');
|
||||
const ciphertext = Buffer.from(parts.slice(2).join(':'), 'base64');
|
||||
|
||||
const decipher = crypto.createDecipheriv(algorithm, keyBuffer, iv);
|
||||
decipher.setAuthTag(authTag);
|
||||
|
||||
let decrypted = decipher.update(ciphertext);
|
||||
decrypted = Buffer.concat([decrypted, decipher.final()]);
|
||||
|
||||
return decrypted;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate checksum for backup
|
||||
*/
|
||||
calculateChecksum(data) {
|
||||
return crypto.createHash('sha256').update(data).digest('hex');
|
||||
}
|
||||
|
||||
/**
|
||||
* Save backup to destination
|
||||
*/
|
||||
async saveToDestination(data, destination, backupId) {
|
||||
switch (destination.type) {
|
||||
case 'local':
|
||||
return await this.saveToLocal(data, destination, backupId);
|
||||
default:
|
||||
throw new Error(`Unsupported destination type: ${destination.type}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save to local filesystem
|
||||
*/
|
||||
async saveToLocal(data, destination, backupId) {
|
||||
const backupDir = destination.path || DEFAULT_BACKUP_DIR;
|
||||
|
||||
// Ensure directory exists
|
||||
if (!fs.existsSync(backupDir)) {
|
||||
fs.mkdirSync(backupDir, { recursive: true });
|
||||
}
|
||||
|
||||
const filename = `${backupId}.backup`;
|
||||
const filepath = path.join(backupDir, filename);
|
||||
|
||||
fs.writeFileSync(filepath, data);
|
||||
|
||||
return {
|
||||
type: 'local',
|
||||
path: filepath,
|
||||
size: data.length
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify backup integrity
|
||||
*/
|
||||
async verifyBackup(location, expectedChecksum) {
|
||||
if (location.type === 'local') {
|
||||
const data = fs.readFileSync(location.path);
|
||||
const checksum = this.calculateChecksum(data);
|
||||
|
||||
if (checksum !== expectedChecksum) {
|
||||
throw new Error('Backup verification failed: checksum mismatch');
|
||||
}
|
||||
|
||||
console.log('[BackupManager] Backup verified successfully');
|
||||
return true;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore from backup
|
||||
*/
|
||||
async restoreBackup(backupId, options = {}) {
|
||||
console.log(`[BackupManager] Starting restore from backup: ${backupId}`);
|
||||
|
||||
this.emit('restore-start', { backupId, timestamp: new Date().toISOString() });
|
||||
|
||||
try {
|
||||
// Find backup in history
|
||||
const backup = this.history.find(b => b.id === backupId);
|
||||
if (!backup) {
|
||||
throw new Error(`Backup not found: ${backupId}`);
|
||||
}
|
||||
|
||||
// Load backup data
|
||||
const location = backup.locations[0]; // Use first location
|
||||
let data = fs.readFileSync(location.path);
|
||||
|
||||
// Decrypt if needed
|
||||
if (backup.encrypted && options.encryptionKey) {
|
||||
data = await this.decryptBackup(data, options.encryptionKey);
|
||||
}
|
||||
|
||||
// Decompress
|
||||
const backupData = await this.decompressBackup(data);
|
||||
|
||||
// Verify version compatibility
|
||||
if (backupData.version !== '1.0') {
|
||||
throw new Error(`Unsupported backup version: ${backupData.version}`);
|
||||
}
|
||||
|
||||
// Restore data
|
||||
const restored = {};
|
||||
|
||||
if (backupData.data.services && options.restoreServices !== false) {
|
||||
this.restoreServices(backupData.data.services);
|
||||
restored.services = true;
|
||||
}
|
||||
|
||||
if (backupData.data.config && options.restoreConfig !== false) {
|
||||
this.restoreConfig(backupData.data.config);
|
||||
restored.config = true;
|
||||
}
|
||||
|
||||
if (backupData.data.credentials && options.restoreCredentials !== false) {
|
||||
this.restoreCredentials(backupData.data.credentials);
|
||||
restored.credentials = true;
|
||||
}
|
||||
|
||||
if (backupData.data.stats && options.restoreStats !== false) {
|
||||
this.restoreStats(backupData.data.stats);
|
||||
restored.stats = true;
|
||||
}
|
||||
|
||||
if (backupData.data.volumes && options.restoreVolumes !== false) {
|
||||
const volumeResult = await this.restoreVolumes(backupData.data.volumes);
|
||||
restored.volumes = volumeResult;
|
||||
}
|
||||
|
||||
this.emit('restore-complete', {
|
||||
backupId,
|
||||
restored,
|
||||
timestamp: new Date().toISOString()
|
||||
});
|
||||
|
||||
console.log('[BackupManager] Restore completed successfully');
|
||||
return { success: true, restored };
|
||||
} catch (error) {
|
||||
this.emit('restore-failed', {
|
||||
backupId,
|
||||
error: error.message,
|
||||
timestamp: new Date().toISOString()
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore services configuration
|
||||
*/
|
||||
restoreServices(services) {
|
||||
const servicesFile = process.env.SERVICES_FILE || path.join(__dirname, 'services.json');
|
||||
fs.writeFileSync(servicesFile, JSON.stringify(services, null, 2));
|
||||
console.log('[BackupManager] Services restored');
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore configuration
|
||||
*/
|
||||
restoreConfig(config) {
|
||||
const configFile = process.env.CONFIG_FILE || path.join(__dirname, 'config.json');
|
||||
fs.writeFileSync(configFile, JSON.stringify(config, null, 2));
|
||||
console.log('[BackupManager] Config restored');
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore credentials
|
||||
*/
|
||||
restoreCredentials(credentials) {
|
||||
const credentialManager = require('./credential-manager');
|
||||
credentialManager.importBackup(credentials);
|
||||
console.log('[BackupManager] Credentials restored');
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore stats
|
||||
*/
|
||||
restoreStats(stats) {
|
||||
const resourceMonitor = require('./resource-monitor');
|
||||
resourceMonitor.importStats(stats);
|
||||
console.log('[BackupManager] Stats restored');
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup old backups based on retention policy
|
||||
*/
|
||||
async cleanupOldBackups(name, retention) {
|
||||
const backups = this.history.filter(b => b.name === name && b.status === 'success');
|
||||
|
||||
// Sort by timestamp (newest first)
|
||||
backups.sort((a, b) => new Date(b.timestamp) - new Date(a.timestamp));
|
||||
|
||||
// Keep only the specified number of backups
|
||||
const toDelete = backups.slice(retention.keep || 7);
|
||||
|
||||
for (const backup of toDelete) {
|
||||
try {
|
||||
// Delete from all locations
|
||||
for (const location of backup.locations) {
|
||||
if (location.type === 'local' && fs.existsSync(location.path)) {
|
||||
fs.unlinkSync(location.path);
|
||||
}
|
||||
}
|
||||
|
||||
// Remove from history
|
||||
this.history = this.history.filter(b => b.id !== backup.id);
|
||||
|
||||
console.log(`[BackupManager] Deleted old backup: ${backup.id}`);
|
||||
} catch (error) {
|
||||
console.error(`[BackupManager] Error deleting backup ${backup.id}:`, error.message);
|
||||
}
|
||||
}
|
||||
|
||||
this.saveHistory();
|
||||
}
|
||||
|
||||
/**
|
||||
* Add entry to backup history
|
||||
*/
|
||||
addToHistory(entry) {
|
||||
this.history.push(entry);
|
||||
|
||||
// Keep only last 100 entries
|
||||
if (this.history.length > 100) {
|
||||
this.history = this.history.slice(-100);
|
||||
}
|
||||
|
||||
this.saveHistory();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get backup history
|
||||
*/
|
||||
getHistory(limit = 50) {
|
||||
return this.history.slice(-limit).reverse();
|
||||
}
|
||||
|
||||
/**
 * Get backup configuration.
 * Returns the live config object (not a copy); callers should treat it as
 * read-only and apply changes through updateConfig() so they persist and
 * the scheduler restarts.
 */
getConfig() {
  return this.config;
}
|
||||
|
||||
/**
|
||||
* Update backup configuration
|
||||
*/
|
||||
updateConfig(config) {
|
||||
this.config = { ...this.config, ...config };
|
||||
this.saveConfig();
|
||||
|
||||
// Restart scheduler to apply changes
|
||||
this.stop();
|
||||
this.start();
|
||||
}
|
||||
|
||||
/**
|
||||
* Load configuration from disk
|
||||
*/
|
||||
loadConfig() {
|
||||
try {
|
||||
if (fs.existsSync(BACKUP_CONFIG_FILE)) {
|
||||
return JSON.parse(fs.readFileSync(BACKUP_CONFIG_FILE, 'utf8'));
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('[BackupManager] Error loading config:', error.message);
|
||||
}
|
||||
|
||||
return {
|
||||
backups: {},
|
||||
defaultRetention: { keep: 7 }
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Save configuration to disk
|
||||
*/
|
||||
saveConfig() {
|
||||
try {
|
||||
fs.writeFileSync(BACKUP_CONFIG_FILE, JSON.stringify(this.config, null, 2));
|
||||
} catch (error) {
|
||||
console.error('[BackupManager] Error saving config:', error.message);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load history from disk
|
||||
*/
|
||||
loadHistory() {
|
||||
try {
|
||||
if (fs.existsSync(BACKUP_HISTORY_FILE)) {
|
||||
return JSON.parse(fs.readFileSync(BACKUP_HISTORY_FILE, 'utf8'));
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('[BackupManager] Error loading history:', error.message);
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Save history to disk
|
||||
*/
|
||||
saveHistory() {
|
||||
try {
|
||||
fs.writeFileSync(BACKUP_HISTORY_FILE, JSON.stringify(this.history, null, 2));
|
||||
} catch (error) {
|
||||
console.error('[BackupManager] Error saving history:', error.message);
|
||||
}
|
||||
}
|
||||
}

// Export singleton instance
// (one shared instance per process: a single scheduler, config, and history)
module.exports = new BackupManager();
|
||||
Reference in New Issue
Block a user