feat: cloud backup destinations + long-term resource history

Cloud backups (Dropbox / WebDAV / SFTP):
- backup-manager.js: save + load handlers per provider, credential
  resolution via credentialManager, destination probe.
- routes/backups.js: /credentials/{provider} (masked GET, POST, DELETE),
  /test-destination, scheduling endpoints.
- status/js/backup-restore.js: destination picker, provider-specific
  credential forms, test button wired to backend probe.
- npm deps already present (dropbox 10.34.0, webdav 5.7.1,
  ssh2-sftp-client 11.0.0).
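
For reference, a rough sketch of how a client could exercise these
endpoints. The /api/v1/backups prefix and the payload field names are
assumptions for illustration, not confirmed by this commit; see
routes/backups.js for the real contract.

    const base = '/api/v1/backups'; // assumed prefix

    // Store SFTP credentials; per the bullet above, subsequent GETs return them masked.
    await fetch(`${base}/credentials/sftp`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ host: 'backup.example.com', port: 22, username: 'dash', password: 's3cret' })
    });

    // Ask the backend to run its write/read/delete probe against the destination.
    const probe = await (await fetch(`${base}/test-destination`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ type: 'sftp', path: '/dashcaddy-backups' })
    })).json();
    // probe => { success, type, elapsedMs, verified } (shape from testDestination() in the diff below)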

Resource history:
- resource-monitor.js: three-tier rollup storage — raw 10s samples
  (7-day retention), hourly rollups (30-day), daily rollups
  (365-day). getHistoryByRange() auto-selects the appropriate tier;
  see the sketch below.
- routes/monitoring.js: /monitoring/history/:containerId now supports
  startTime/endTime range mode (legacy ?hours=N still works).
- status/js/resource-monitor.js + dashboard.css: "History" tab with
  range buttons (1h/24h/7d/30d/1y), SVG sparklines for
  CPU / memory / network. Renderer handles raw and rolled-up shapes.
- status/dist/features.js rebuilt from source via build.js.
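
A sketch of the tier selection getHistoryByRange() performs, using the
retention windows listed above (the internals are assumed for
illustration; resource-monitor.js is authoritative):

    // Pick the storage tier whose retention window still covers the start of the range.
    function pickTier(startTime, now = Date.now()) {
      const DAY = 24 * 60 * 60 * 1000;
      const age = now - startTime;
      if (age <= 7 * DAY) return 'raw';      // 10s samples, kept 7 days
      if (age <= 30 * DAY) return 'hourly';  // hourly rollups, kept 30 days
      return 'daily';                        // daily rollups, kept 365 days
    }

    // Range mode on the route (legacy ?hours=N still works), e.g.:
    //   GET /monitoring/history/<containerId>?startTime=1700000000000&endTime=1700086400000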

Lifted out of wip/cloud-backups-and-history; the half-finished app-deps
feature from that branch (frontend calls /api/v1/apps/check-dependencies
but the endpoint doesn't exist) is preserved separately on wip/app-deps
for later.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
backup-manager.js
@@ -543,17 +543,42 @@ class BackupManager extends EventEmitter {
     switch (destination.type) {
       case 'local':
         return await this.saveToLocal(data, destination, backupId);
+      case 'dropbox':
+        return await this.saveToDropbox(data, destination, backupId);
+      case 'webdav':
+        return await this.saveToWebDAV(data, destination, backupId);
+      case 'sftp':
+        return await this.saveToSFTP(data, destination, backupId);
       default:
         throw new Error(`Unsupported destination type: ${destination.type}`);
     }
   }
+
+  /**
+   * Load encrypted backup blob from a destination location.
+   * Returns a Buffer that can be passed to decryptBackup/decompressBackup.
+   */
+  async loadFromDestination(location) {
+    switch (location.type) {
+      case 'local':
+        return fs.readFileSync(location.path);
+      case 'dropbox':
+        return await this.loadFromDropbox(location);
+      case 'webdav':
+        return await this.loadFromWebDAV(location);
+      case 'sftp':
+        return await this.loadFromSFTP(location);
+      default:
+        throw new Error(`Unsupported destination type: ${location.type}`);
+    }
+  }
 
   /**
    * Save to local filesystem
    */
   async saveToLocal(data, destination, backupId) {
     const backupDir = destination.path || DEFAULT_BACKUP_DIR;
 
     // Ensure directory exists
     if (!fs.existsSync(backupDir)) {
       fs.mkdirSync(backupDir, { recursive: true });
@@ -561,9 +586,9 @@ class BackupManager extends EventEmitter {
 
     const filename = `${backupId}.backup`;
     const filepath = path.join(backupDir, filename);
 
     fs.writeFileSync(filepath, data);
 
     return {
       type: 'local',
       path: filepath,
@@ -571,6 +596,257 @@ class BackupManager extends EventEmitter {
     };
   }
 
+  // ==================== CLOUD DESTINATIONS ====================
+
+  /**
+   * Resolve credentials for a given provider via the credentialManager.
+   * Throws if required fields are missing.
+   */
+  async _getCloudCredentials(provider) {
+    const credentialManager = require('./credential-manager');
+    const creds = {};
+    if (provider === 'dropbox') {
+      creds.token = await credentialManager.retrieve('backup.dropbox.token');
+      if (!creds.token) throw new Error('Dropbox token not configured');
+    } else if (provider === 'webdav') {
+      creds.url = await credentialManager.retrieve('backup.webdav.url');
+      creds.username = await credentialManager.retrieve('backup.webdav.username');
+      creds.password = await credentialManager.retrieve('backup.webdav.password');
+      if (!creds.url || !creds.username || !creds.password) {
+        throw new Error('WebDAV credentials incomplete (need url, username, password)');
+      }
+    } else if (provider === 'sftp') {
+      creds.host = await credentialManager.retrieve('backup.sftp.host');
+      const portStr = await credentialManager.retrieve('backup.sftp.port');
+      creds.port = parseInt(portStr || '22', 10);
+      creds.username = await credentialManager.retrieve('backup.sftp.username');
+      creds.password = await credentialManager.retrieve('backup.sftp.password');
+      creds.privateKey = await credentialManager.retrieve('backup.sftp.privateKey');
+      if (!creds.host || !creds.username || (!creds.password && !creds.privateKey)) {
+        throw new Error('SFTP credentials incomplete (need host, username, and either password or privateKey)');
+      }
+    }
+    return creds;
+  }
+
+  // ----- Dropbox -----
+
+  async saveToDropbox(data, destination, backupId) {
+    const { Dropbox } = require('dropbox');
+    const creds = await this._getCloudCredentials('dropbox');
+    const dbx = new Dropbox({ accessToken: creds.token });
+
+    const folder = (destination.path || '/dashcaddy-backups').replace(/\/+$/, '');
+    const remotePath = `${folder}/${backupId}.backup`;
+
+    await dbx.filesUpload({
+      path: remotePath,
+      contents: data,
+      mode: { '.tag': 'overwrite' },
+      autorename: false,
+      mute: true
+    });
+
+    return {
+      type: 'dropbox',
+      path: remotePath,
+      size: data.length
+    };
+  }
+
+  async loadFromDropbox(location) {
+    const { Dropbox } = require('dropbox');
+    const creds = await this._getCloudCredentials('dropbox');
+    const dbx = new Dropbox({ accessToken: creds.token });
+    const result = await dbx.filesDownload({ path: location.path });
+    // Node SDK returns fileBinary on the result
+    const fileBinary = result.result.fileBinary || result.result.fileBlob;
+    if (Buffer.isBuffer(fileBinary)) return fileBinary;
+    return Buffer.from(fileBinary);
+  }
+
+  // ----- WebDAV -----
+
+  async saveToWebDAV(data, destination, backupId) {
+    const { createClient } = require('webdav');
+    const creds = await this._getCloudCredentials('webdav');
+    const client = createClient(creds.url, {
+      username: creds.username,
+      password: creds.password
+    });
+
+    const folder = (destination.path || '/dashcaddy-backups').replace(/\/+$/, '');
+
+    // Ensure folder exists
+    try {
+      const exists = await client.exists(folder);
+      if (!exists) await client.createDirectory(folder, { recursive: true });
+    } catch (_) {
+      // best-effort
+    }
+
+    const remotePath = `${folder}/${backupId}.backup`;
+    await client.putFileContents(remotePath, data, { overwrite: true });
+
+    return {
+      type: 'webdav',
+      path: remotePath,
+      size: data.length
+    };
+  }
+
+  async loadFromWebDAV(location) {
+    const { createClient } = require('webdav');
+    const creds = await this._getCloudCredentials('webdav');
+    const client = createClient(creds.url, {
+      username: creds.username,
+      password: creds.password
+    });
+    const data = await client.getFileContents(location.path);
+    return Buffer.isBuffer(data) ? data : Buffer.from(data);
+  }
+
+  // ----- SFTP -----
+
+  async saveToSFTP(data, destination, backupId) {
+    const SftpClient = require('ssh2-sftp-client');
+    const creds = await this._getCloudCredentials('sftp');
+    const client = new SftpClient();
+
+    try {
+      await client.connect({
+        host: creds.host,
+        port: creds.port,
+        username: creds.username,
+        password: creds.password || undefined,
+        privateKey: creds.privateKey || undefined
+      });
+
+      const folder = (destination.path || '/dashcaddy-backups').replace(/\/+$/, '');
+      // Ensure remote dir exists
+      try {
+        const exists = await client.exists(folder);
+        if (!exists) await client.mkdir(folder, true);
+      } catch (_) {
+        // best-effort
+      }
+
+      const remotePath = `${folder}/${backupId}.backup`;
+      await client.put(Buffer.from(data), remotePath);
+
+      return {
+        type: 'sftp',
+        path: remotePath,
+        size: data.length
+      };
+    } finally {
+      try { await client.end(); } catch (_) {}
+    }
+  }
+
+  async loadFromSFTP(location) {
+    const SftpClient = require('ssh2-sftp-client');
+    const creds = await this._getCloudCredentials('sftp');
+    const client = new SftpClient();
+    try {
+      await client.connect({
+        host: creds.host,
+        port: creds.port,
+        username: creds.username,
+        password: creds.password || undefined,
+        privateKey: creds.privateKey || undefined
+      });
+      const buffer = await client.get(location.path);
+      return Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer);
+    } finally {
+      try { await client.end(); } catch (_) {}
+    }
+  }
+
+  /**
+   * Test that a destination is reachable + writable + deletable.
+   * Performs a small write/read/delete probe.
+   */
+  async testDestination(destination) {
+    const probeId = `test-${Date.now()}`;
+    const probeData = Buffer.from(`dashcaddy-test-${probeId}`);
+    const start = Date.now();
+
+    try {
+      const location = await this.saveToDestination(probeData, destination, probeId);
+
+      // Read it back
+      let readBack = null;
+      try {
+        readBack = await this.loadFromDestination(location);
+      } catch (_) {
+        // Read-back is best-effort; `verified` is reported as null below.
+      }
+
+      // Delete the probe
+      try {
+        await this._deleteFromDestination(location);
+      } catch (_) {}
+
+      const elapsed = Date.now() - start;
+      return {
+        success: true,
+        type: destination.type,
+        elapsedMs: elapsed,
+        verified: readBack ? readBack.equals(probeData) : null
+      };
+    } catch (error) {
+      return {
+        success: false,
+        type: destination.type,
+        error: error.message,
+        elapsedMs: Date.now() - start
+      };
+    }
+  }
+
+  /**
+   * Delete a backup from a destination location
+   */
+  async _deleteFromDestination(location) {
+    if (location.type === 'local') {
+      if (fs.existsSync(location.path)) fs.unlinkSync(location.path);
+      return;
+    }
+    if (location.type === 'dropbox') {
+      const { Dropbox } = require('dropbox');
+      const creds = await this._getCloudCredentials('dropbox');
+      const dbx = new Dropbox({ accessToken: creds.token });
+      try { await dbx.filesDeleteV2({ path: location.path }); } catch (_) {}
+      return;
+    }
+    if (location.type === 'webdav') {
+      const { createClient } = require('webdav');
+      const creds = await this._getCloudCredentials('webdav');
+      const client = createClient(creds.url, { username: creds.username, password: creds.password });
+      try { await client.deleteFile(location.path); } catch (_) {}
+      return;
+    }
+    if (location.type === 'sftp') {
+      const SftpClient = require('ssh2-sftp-client');
+      const creds = await this._getCloudCredentials('sftp');
+      const client = new SftpClient();
+      try {
+        await client.connect({
+          host: creds.host,
+          port: creds.port,
+          username: creds.username,
+          password: creds.password || undefined,
+          privateKey: creds.privateKey || undefined
+        });
+        try { await client.delete(location.path); } catch (_) {}
+      } finally {
+        try { await client.end(); } catch (_) {}
+      }
+      return;
+    }
+  }
 
   /**
    * Verify backup integrity
   */
@@ -605,9 +881,24 @@ class BackupManager extends EventEmitter {
       throw new Error(`Backup not found: ${backupId}`);
     }
 
-    // Load backup data
-    const location = backup.locations[0]; // Use first location
-    let data = fs.readFileSync(location.path);
+    // Load backup data — try each destination location until one succeeds
+    const location = backup.locations[0]; // Primary location
+    let data;
+    try {
+      data = await this.loadFromDestination(location);
+    } catch (loadErr) {
+      // Fall back to other locations if available
+      let recovered = false;
+      for (let i = 1; i < backup.locations.length; i++) {
+        try {
+          data = await this.loadFromDestination(backup.locations[i]);
+          recovered = true;
+          console.log(`[BackupManager] Loaded backup from fallback location ${backup.locations[i].type}`);
+          break;
+        } catch (_) {}
+      }
+      if (!recovered) throw loadErr;
+    }
 
     // Decrypt if needed
     if (backup.encrypted && options.encryptionKey) {
@@ -718,16 +1009,18 @@ class BackupManager extends EventEmitter {
 
     for (const backup of toDelete) {
       try {
-        // Delete from all locations
+        // Delete from all locations (local + cloud)
         for (const location of backup.locations) {
-          if (location.type === 'local' && fs.existsSync(location.path)) {
-            fs.unlinkSync(location.path);
+          try {
+            await this._deleteFromDestination(location);
+          } catch (delErr) {
+            console.warn(`[BackupManager] Could not delete ${location.type} location for ${backup.id}:`, delErr.message);
           }
         }
 
         // Remove from history
         this.history = this.history.filter(b => b.id !== backup.id);
 
         console.log(`[BackupManager] Deleted old backup: ${backup.id}`);
       } catch (error) {
         console.error(`[BackupManager] Error deleting backup ${backup.id}:`, error.message);
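
Hedged usage sketch of the new BackupManager surface (names and object
shapes inferred from the hunks above; `manager`, `encryptedBlob`, and
`backupId` are placeholders, not code from this commit):

    // Inside an async function, with `manager` a BackupManager instance:
    const dest = { type: 'webdav', path: '/dashcaddy-backups' };

    const check = await manager.testDestination(dest); // write/read/delete probe
    if (!check.success) throw new Error(check.error);

    // saveToDestination() returns a location descriptor ({ type, path, size })
    // that restore later hands back to loadFromDestination().
    const location = await manager.saveToDestination(encryptedBlob, dest, backupId);
    const blob = await manager.loadFromDestination(location);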