feat: cloud backup destinations + long-term resource history

Cloud backups (Dropbox / WebDAV / SFTP):
- backup-manager.js: save + load handlers per provider, credential
  resolution via credentialManager, destination probe.
- routes/backups.js: /credentials/{provider} (masked GET, POST, DELETE),
  /test-destination, scheduling endpoints (usage sketch below).
- status/js/backup-restore.js: destination picker, provider-specific
  credential forms, test button wired to backend probe.
- npm deps already present (dropbox 10.34.0, webdav 5.7.1,
  ssh2-sftp-client 11.0.0).
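
A minimal usage sketch for the new endpoints, assuming the router is
mounted under /api/v1 (consistent with the apps endpoint mentioned
below) and that success() wraps the response body; host and
credentials are hypothetical:

  // Save SFTP credentials ('***' values mean "keep the stored secret").
  await fetch('/api/v1/backups/credentials/sftp', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ host: 'nas.local', username: 'backup', password: 's3cr3t' })
  });

  // Probe the destination: write + read-back + delete of a small test blob.
  const resp = await fetch('/api/v1/backups/test-destination', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ type: 'sftp', path: '/backups/dashcaddy' })
  });
  const probe = await resp.json(); // success, type, elapsedMs, verified (modulo envelope)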

Resource history:
- resource-monitor.js: three-tier rollup storage — raw 10s samples
  (7-day retention), hourly rollups (30-day), daily rollups
  (365-day). getHistoryByRange() auto-selects the appropriate tier.
- routes/monitoring.js: /monitoring/history/:containerId now supports
  startTime/endTime range mode (legacy ?hours=N still works); see the
  example after this list.
- status/js/resource-monitor.js + dashboard.css: "History" tab with
  range buttons (1h/24h/7d/30d/1y), SVG sparklines for
  CPU / memory / network. Renderer handles raw and rolled-up shapes.
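
Illustrative range-mode query from the History tab (hypothetical
containerId; same /api/v1 mount assumption as above). A 7-day window
is >24h and <=30d, so the backend serves hourly rollups:

  const endTime = Date.now();
  const startTime = endTime - 7 * 24 * 60 * 60 * 1000;
  const resp = await fetch(
    `/api/v1/monitoring/history/${containerId}?startTime=${startTime}&endTime=${endTime}`
  );
  const body = await resp.json(); // expect tier 'hourly', unit '1h', plus samples[]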

status/dist/features.js rebuilt from source via build.js.

Lifted out of wip/cloud-backups-and-history; the half-finished
app-deps feature from that branch (frontend calls /api/v1/apps/
check-dependencies but the endpoint doesn't exist) is preserved
separately on wip/app-deps for later.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-06 19:14:38 -07:00
parent 6e47df0d3c
commit f537a0dd25
8 changed files with 1440 additions and 233 deletions

backup-manager.js

@@ -543,17 +543,42 @@ class BackupManager extends EventEmitter {
switch (destination.type) {
case 'local':
return await this.saveToLocal(data, destination, backupId);
case 'dropbox':
return await this.saveToDropbox(data, destination, backupId);
case 'webdav':
return await this.saveToWebDAV(data, destination, backupId);
case 'sftp':
return await this.saveToSFTP(data, destination, backupId);
default:
throw new Error(`Unsupported destination type: ${destination.type}`);
}
}
/**
* Load encrypted backup blob from a destination location.
* Returns a Buffer that can be passed to decryptBackup/decompressBackup.
*/
async loadFromDestination(location) {
switch (location.type) {
case 'local':
return fs.readFileSync(location.path);
case 'dropbox':
return await this.loadFromDropbox(location);
case 'webdav':
return await this.loadFromWebDAV(location);
case 'sftp':
return await this.loadFromSFTP(location);
default:
throw new Error(`Unsupported destination type: ${location.type}`);
}
}
/**
* Save to local filesystem
*/
async saveToLocal(data, destination, backupId) {
const backupDir = destination.path || DEFAULT_BACKUP_DIR;
// Ensure directory exists
if (!fs.existsSync(backupDir)) {
fs.mkdirSync(backupDir, { recursive: true });
@@ -561,9 +586,9 @@ class BackupManager extends EventEmitter {
const filename = `${backupId}.backup`;
const filepath = path.join(backupDir, filename);
fs.writeFileSync(filepath, data);
return {
type: 'local',
path: filepath,
@@ -571,6 +596,257 @@ class BackupManager extends EventEmitter {
};
}
// ==================== CLOUD DESTINATIONS ====================
/**
* Resolve credentials for a given provider via the credentialManager.
* Throws if required fields are missing.
*/
async _getCloudCredentials(provider) {
const credentialManager = require('./credential-manager');
const creds = {};
if (provider === 'dropbox') {
creds.token = await credentialManager.retrieve('backup.dropbox.token');
if (!creds.token) throw new Error('Dropbox token not configured');
} else if (provider === 'webdav') {
creds.url = await credentialManager.retrieve('backup.webdav.url');
creds.username = await credentialManager.retrieve('backup.webdav.username');
creds.password = await credentialManager.retrieve('backup.webdav.password');
if (!creds.url || !creds.username || !creds.password) {
throw new Error('WebDAV credentials incomplete (need url, username, password)');
}
} else if (provider === 'sftp') {
creds.host = await credentialManager.retrieve('backup.sftp.host');
const portStr = await credentialManager.retrieve('backup.sftp.port');
creds.port = parseInt(portStr || '22', 10);
creds.username = await credentialManager.retrieve('backup.sftp.username');
creds.password = await credentialManager.retrieve('backup.sftp.password');
creds.privateKey = await credentialManager.retrieve('backup.sftp.privateKey');
if (!creds.host || !creds.username || (!creds.password && !creds.privateKey)) {
throw new Error('SFTP credentials incomplete (need host, username, and either password or privateKey)');
}
}
return creds;
}
// ----- Dropbox -----
async saveToDropbox(data, destination, backupId) {
const { Dropbox } = require('dropbox');
const creds = await this._getCloudCredentials('dropbox');
const dbx = new Dropbox({ accessToken: creds.token });
const folder = (destination.path || '/dashcaddy-backups').replace(/\/+$/, '');
const remotePath = `${folder}/${backupId}.backup`;
await dbx.filesUpload({
path: remotePath,
contents: data,
mode: { '.tag': 'overwrite' },
autorename: false,
mute: true
});
return {
type: 'dropbox',
path: remotePath,
size: data.length
};
}
async loadFromDropbox(location) {
const { Dropbox } = require('dropbox');
const creds = await this._getCloudCredentials('dropbox');
const dbx = new Dropbox({ accessToken: creds.token });
const result = await dbx.filesDownload({ path: location.path });
// Node SDK returns fileBinary on the result
const fileBinary = result.result.fileBinary || result.result.fileBlob;
if (Buffer.isBuffer(fileBinary)) return fileBinary;
return Buffer.from(fileBinary);
}
// ----- WebDAV -----
async saveToWebDAV(data, destination, backupId) {
const { createClient } = require('webdav');
const creds = await this._getCloudCredentials('webdav');
const client = createClient(creds.url, {
username: creds.username,
password: creds.password
});
const folder = (destination.path || '/dashcaddy-backups').replace(/\/+$/, '');
// Ensure folder exists
try {
const exists = await client.exists(folder);
if (!exists) await client.createDirectory(folder, { recursive: true });
} catch (_) {
// best-effort
}
const remotePath = `${folder}/${backupId}.backup`;
await client.putFileContents(remotePath, data, { overwrite: true });
return {
type: 'webdav',
path: remotePath,
size: data.length
};
}
async loadFromWebDAV(location) {
const { createClient } = require('webdav');
const creds = await this._getCloudCredentials('webdav');
const client = createClient(creds.url, {
username: creds.username,
password: creds.password
});
const data = await client.getFileContents(location.path);
return Buffer.isBuffer(data) ? data : Buffer.from(data);
}
// ----- SFTP -----
async saveToSFTP(data, destination, backupId) {
const SftpClient = require('ssh2-sftp-client');
const creds = await this._getCloudCredentials('sftp');
const client = new SftpClient();
try {
await client.connect({
host: creds.host,
port: creds.port,
username: creds.username,
password: creds.password || undefined,
privateKey: creds.privateKey || undefined
});
const folder = (destination.path || '/dashcaddy-backups').replace(/\/+$/, '');
// Ensure remote dir exists
try {
const exists = await client.exists(folder);
if (!exists) await client.mkdir(folder, true);
} catch (_) {
// best-effort
}
const remotePath = `${folder}/${backupId}.backup`;
await client.put(Buffer.from(data), remotePath);
return {
type: 'sftp',
path: remotePath,
size: data.length
};
} finally {
try { await client.end(); } catch (_) {}
}
}
async loadFromSFTP(location) {
const SftpClient = require('ssh2-sftp-client');
const creds = await this._getCloudCredentials('sftp');
const client = new SftpClient();
try {
await client.connect({
host: creds.host,
port: creds.port,
username: creds.username,
password: creds.password || undefined,
privateKey: creds.privateKey || undefined
});
const buffer = await client.get(location.path);
return Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer);
} finally {
try { await client.end(); } catch (_) {}
}
}
/**
* Test that a destination is reachable + writable + deletable.
* Performs a small write/read/delete probe.
*/
async testDestination(destination) {
const probeId = `test-${Date.now()}`;
const probeData = Buffer.from(`dashcaddy-test-${probeId}`);
const start = Date.now();
try {
const location = await this.saveToDestination(probeData, destination, probeId);
// Read it back
let readBack = null;
try {
readBack = await this.loadFromDestination(location);
} catch (_) {
// Read-back verification is best-effort; for some providers (e.g. local)
// the filesystem is already trusted, so a failure here is non-fatal
}
// Delete the probe
try {
await this._deleteFromDestination(location);
} catch (_) {}
const elapsed = Date.now() - start;
return {
success: true,
type: destination.type,
elapsedMs: elapsed,
verified: readBack ? readBack.equals(probeData) : null
};
} catch (error) {
return {
success: false,
type: destination.type,
error: error.message,
elapsedMs: Date.now() - start
};
}
}
/**
* Delete a backup from a destination location
*/
async _deleteFromDestination(location) {
if (location.type === 'local') {
if (fs.existsSync(location.path)) fs.unlinkSync(location.path);
return;
}
if (location.type === 'dropbox') {
const { Dropbox } = require('dropbox');
const creds = await this._getCloudCredentials('dropbox');
const dbx = new Dropbox({ accessToken: creds.token });
try { await dbx.filesDeleteV2({ path: location.path }); } catch (_) {}
return;
}
if (location.type === 'webdav') {
const { createClient } = require('webdav');
const creds = await this._getCloudCredentials('webdav');
const client = createClient(creds.url, { username: creds.username, password: creds.password });
try { await client.deleteFile(location.path); } catch (_) {}
return;
}
if (location.type === 'sftp') {
const SftpClient = require('ssh2-sftp-client');
const creds = await this._getCloudCredentials('sftp');
const client = new SftpClient();
try {
await client.connect({
host: creds.host,
port: creds.port,
username: creds.username,
password: creds.password || undefined,
privateKey: creds.privateKey || undefined
});
try { await client.delete(location.path); } catch (_) {}
} finally {
try { await client.end(); } catch (_) {}
}
return;
}
}
/**
* Verify backup integrity
*/
@@ -605,9 +881,24 @@ class BackupManager extends EventEmitter {
throw new Error(`Backup not found: ${backupId}`);
}
// Load backup data — try each destination location until one succeeds
const location = backup.locations[0]; // Primary location
let data;
try {
data = await this.loadFromDestination(location);
} catch (loadErr) {
// Fall back to other locations if available
let recovered = false;
for (let i = 1; i < backup.locations.length; i++) {
try {
data = await this.loadFromDestination(backup.locations[i]);
recovered = true;
console.log(`[BackupManager] Loaded backup from fallback location ${backup.locations[i].type}`);
break;
} catch (_) {}
}
if (!recovered) throw loadErr;
}
// Decrypt if needed
if (backup.encrypted && options.encryptionKey) {
@@ -718,16 +1009,18 @@ class BackupManager extends EventEmitter {
for (const backup of toDelete) {
try {
// Delete from all locations (local + cloud)
for (const location of backup.locations) {
try {
await this._deleteFromDestination(location);
} catch (delErr) {
console.warn(`[BackupManager] Could not delete ${location.type} location for ${backup.id}:`, delErr.message);
}
}
// Remove from history
this.history = this.history.filter(b => b.id !== backup.id);
console.log(`[BackupManager] Deleted old backup: ${backup.id}`);
} catch (error) {
console.error(`[BackupManager] Error deleting backup ${backup.id}:`, error.message);

resource-monitor.js

@@ -13,20 +13,32 @@ const docker = new Docker();
// Configuration
const STATS_FILE = process.env.STATS_FILE || path.join(__dirname, 'container-stats.json');
const STATS_HOURLY_FILE = process.env.STATS_HOURLY_FILE || path.join(__dirname, 'container-stats-hourly.json');
const STATS_DAILY_FILE = process.env.STATS_DAILY_FILE || path.join(__dirname, 'container-stats-daily.json');
const ALERT_CONFIG_FILE = process.env.ALERT_CONFIG_FILE || path.join(__dirname, 'alert-config.json');
const STATS_RETENTION_HOURS = parseInt(process.env.STATS_RETENTION_HOURS || '168', 10); // 7 days raw
const STATS_HOURLY_RETENTION_DAYS = parseInt(process.env.STATS_HOURLY_RETENTION_DAYS || '30', 10); // 30 days hourly
const STATS_DAILY_RETENTION_DAYS = parseInt(process.env.STATS_DAILY_RETENTION_DAYS || '365', 10); // 365 days daily
const MONITORING_INTERVAL = parseInt(process.env.MONITORING_INTERVAL || '10000', 10); // 10 seconds
const ROLLUP_HOURLY_INTERVAL = parseInt(process.env.ROLLUP_HOURLY_INTERVAL || String(60 * 60 * 1000), 10); // 1h
const ROLLUP_DAILY_INTERVAL = parseInt(process.env.ROLLUP_DAILY_INTERVAL || String(24 * 60 * 60 * 1000), 10); // 24h
class ResourceMonitor extends EventEmitter {
constructor() {
super();
this.monitoring = false;
this.monitoringInterval = null;
this.hourlyRollupTimer = null;
this.dailyRollupTimer = null;
this.stats = new Map(); // containerId -> { name, history: [...] } (raw 10s samples, 7d)
this.hourlyHistory = new Map(); // containerId -> { name, samples: [...] } (hourly avg, 30d)
this.dailyHistory = new Map(); // containerId -> { name, samples: [...] } (daily avg, 365d)
this.alerts = new Map(); // containerId -> alert config
this.lastAlerts = new Map(); // containerId -> last alert timestamp
this.loadStats();
this.loadHourlyStats();
this.loadDailyStats();
this.loadAlertConfig();
}
@@ -42,7 +54,23 @@ class ResourceMonitor extends EventEmitter {
console.log('[ResourceMonitor] Starting container monitoring');
this.monitoring = true;
this.monitoringInterval = setInterval(() => this.collectStats(), MONITORING_INTERVAL);
// Hourly rollup — fires once an hour, computes the previous full hour
this.hourlyRollupTimer = setInterval(() => {
try { this.rollupHourly(); } catch (e) { console.error('[ResourceMonitor] hourly rollup error:', e.message); }
}, ROLLUP_HOURLY_INTERVAL);
// Daily rollup — schedule first run at the next midnight, then fire every 24h
const now = new Date();
const nextMidnight = new Date(now.getFullYear(), now.getMonth(), now.getDate() + 1, 0, 0, 5);
const msUntilMidnight = nextMidnight.getTime() - now.getTime();
setTimeout(() => {
try { this.rollupDaily(); } catch (e) { console.error('[ResourceMonitor] daily rollup error:', e.message); }
this.dailyRollupTimer = setInterval(() => {
try { this.rollupDaily(); } catch (e) { console.error('[ResourceMonitor] daily rollup error:', e.message); }
}, ROLLUP_DAILY_INTERVAL);
}, msUntilMidnight);
// Initial collection
this.collectStats();
}
@@ -52,16 +80,26 @@ class ResourceMonitor extends EventEmitter {
*/
stop() {
if (!this.monitoring) return;
console.log('[ResourceMonitor] Stopping container monitoring');
this.monitoring = false;
if (this.monitoringInterval) {
clearInterval(this.monitoringInterval);
this.monitoringInterval = null;
}
if (this.hourlyRollupTimer) {
clearInterval(this.hourlyRollupTimer);
this.hourlyRollupTimer = null;
}
if (this.dailyRollupTimer) {
clearInterval(this.dailyRollupTimer);
this.dailyRollupTimer = null;
}
this.saveStats();
this.saveHourlyStats();
this.saveDailyStats();
}
/**
@@ -464,12 +502,310 @@ class ResourceMonitor extends EventEmitter {
}
}
/**
* Aggregate a list of raw samples into a single rollup sample
* @param {Array} samples - Raw stats samples
* @param {string} timestamp - ISO timestamp to use for the rollup bucket
* @returns {Object|null} Aggregated sample, or null if input is empty
*/
_aggregateSamples(samples, timestamp) {
if (!samples || samples.length === 0) return null;
let cpuSum = 0, cpuMax = 0;
let memSum = 0, memMax = 0;
let memPctSum = 0, memPctMax = 0;
let netRxSum = 0, netTxSum = 0;
let diskRSum = 0, diskWSum = 0;
for (const s of samples) {
const cpu = s.cpu?.percent || 0;
const memUsage = s.memory?.usage || 0;
const memPct = s.memory?.percent || 0;
cpuSum += cpu; if (cpu > cpuMax) cpuMax = cpu;
memSum += memUsage; if (memUsage > memMax) memMax = memUsage;
memPctSum += memPct; if (memPct > memPctMax) memPctMax = memPct;
netRxSum += s.network?.rxBytes || 0;
netTxSum += s.network?.txBytes || 0;
diskRSum += s.disk?.readBytes || 0;
diskWSum += s.disk?.writeBytes || 0;
}
const n = samples.length;
return {
timestamp,
sampleCount: n,
cpu: {
avg: Math.round((cpuSum / n) * 100) / 100,
max: Math.round(cpuMax * 100) / 100,
},
memory: {
avgUsage: Math.round(memSum / n),
maxUsage: memMax,
avgPercent: Math.round((memPctSum / n) * 100) / 100,
maxPercent: Math.round(memPctMax * 100) / 100,
avgUsageMB: Math.round(memSum / n / 1024 / 1024),
maxUsageMB: Math.round(memMax / 1024 / 1024),
},
network: {
rxBytes: netRxSum,
txBytes: netTxSum,
rxMB: Math.round(netRxSum / 1024 / 1024 * 100) / 100,
txMB: Math.round(netTxSum / 1024 / 1024 * 100) / 100,
},
disk: {
readBytes: diskRSum,
writeBytes: diskWSum,
readMB: Math.round(diskRSum / 1024 / 1024 * 100) / 100,
writeMB: Math.round(diskWSum / 1024 / 1024 * 100) / 100,
},
};
}
/**
* Combine already-aggregated samples (e.g. hourly buckets) into a single coarser bucket
* @param {Array} samples - Aggregated samples (output of _aggregateSamples)
* @param {string} timestamp - ISO timestamp to use for the rollup bucket
* @returns {Object|null}
*/
_combineAggregated(samples, timestamp) {
if (!samples || samples.length === 0) return null;
let totalCount = 0;
let cpuWeightedSum = 0, cpuMax = 0;
let memWeightedSum = 0, memMax = 0;
let memPctWeightedSum = 0, memPctMax = 0;
let netRxSum = 0, netTxSum = 0;
let diskRSum = 0, diskWSum = 0;
for (const s of samples) {
const w = s.sampleCount || 1;
totalCount += w;
cpuWeightedSum += (s.cpu?.avg || 0) * w;
if ((s.cpu?.max || 0) > cpuMax) cpuMax = s.cpu.max;
memWeightedSum += (s.memory?.avgUsage || 0) * w;
if ((s.memory?.maxUsage || 0) > memMax) memMax = s.memory.maxUsage;
memPctWeightedSum += (s.memory?.avgPercent || 0) * w;
if ((s.memory?.maxPercent || 0) > memPctMax) memPctMax = s.memory.maxPercent;
netRxSum += s.network?.rxBytes || 0;
netTxSum += s.network?.txBytes || 0;
diskRSum += s.disk?.readBytes || 0;
diskWSum += s.disk?.writeBytes || 0;
}
return {
timestamp,
sampleCount: totalCount,
cpu: {
avg: Math.round((cpuWeightedSum / totalCount) * 100) / 100,
max: Math.round(cpuMax * 100) / 100,
},
memory: {
avgUsage: Math.round(memWeightedSum / totalCount),
maxUsage: memMax,
avgPercent: Math.round((memPctWeightedSum / totalCount) * 100) / 100,
maxPercent: Math.round(memPctMax * 100) / 100,
avgUsageMB: Math.round(memWeightedSum / totalCount / 1024 / 1024),
maxUsageMB: Math.round(memMax / 1024 / 1024),
},
network: {
rxBytes: netRxSum,
txBytes: netTxSum,
rxMB: Math.round(netRxSum / 1024 / 1024 * 100) / 100,
txMB: Math.round(netTxSum / 1024 / 1024 * 100) / 100,
},
disk: {
readBytes: diskRSum,
writeBytes: diskWSum,
readMB: Math.round(diskRSum / 1024 / 1024 * 100) / 100,
writeMB: Math.round(diskWSum / 1024 / 1024 * 100) / 100,
},
};
}
/**
* Roll up the previous complete hour of raw samples into a single hourly point.
* Trims hourlyHistory entries older than STATS_HOURLY_RETENTION_DAYS.
*/
rollupHourly() {
const now = new Date();
// The "previous complete hour" — bucket starts at top of (current_hour - 1)
const bucketStart = new Date(now.getFullYear(), now.getMonth(), now.getDate(), now.getHours() - 1, 0, 0);
const bucketEnd = new Date(bucketStart.getTime() + 60 * 60 * 1000);
const bucketStartMs = bucketStart.getTime();
const bucketEndMs = bucketEnd.getTime();
const bucketTimestamp = bucketStart.toISOString();
for (const [containerId, data] of this.stats.entries()) {
const samples = data.history.filter(s => {
const t = new Date(s.timestamp).getTime();
return t >= bucketStartMs && t < bucketEndMs;
});
if (samples.length === 0) continue;
const rollup = this._aggregateSamples(samples, bucketTimestamp);
if (!rollup) continue;
if (!this.hourlyHistory.has(containerId)) {
this.hourlyHistory.set(containerId, { name: data.name, samples: [] });
}
const entry = this.hourlyHistory.get(containerId);
entry.name = data.name;
// Avoid duplicate buckets if rollup ran twice
if (!entry.samples.find(s => s.timestamp === bucketTimestamp)) {
entry.samples.push(rollup);
}
// Trim old entries
const cutoff = Date.now() - (STATS_HOURLY_RETENTION_DAYS * 24 * 60 * 60 * 1000);
entry.samples = entry.samples.filter(s => new Date(s.timestamp).getTime() > cutoff);
}
this.saveHourlyStats();
}
/**
* Roll up the previous complete day of hourly samples into a single daily point.
* Trims dailyHistory entries older than STATS_DAILY_RETENTION_DAYS.
*/
rollupDaily() {
const now = new Date();
// Previous calendar day, midnight to midnight
const bucketStart = new Date(now.getFullYear(), now.getMonth(), now.getDate() - 1, 0, 0, 0);
const bucketEnd = new Date(now.getFullYear(), now.getMonth(), now.getDate(), 0, 0, 0);
const bucketStartMs = bucketStart.getTime();
const bucketEndMs = bucketEnd.getTime();
const bucketTimestamp = bucketStart.toISOString();
for (const [containerId, data] of this.hourlyHistory.entries()) {
const samples = data.samples.filter(s => {
const t = new Date(s.timestamp).getTime();
return t >= bucketStartMs && t < bucketEndMs;
});
if (samples.length === 0) continue;
const rollup = this._combineAggregated(samples, bucketTimestamp);
if (!rollup) continue;
if (!this.dailyHistory.has(containerId)) {
this.dailyHistory.set(containerId, { name: data.name, samples: [] });
}
const entry = this.dailyHistory.get(containerId);
entry.name = data.name;
if (!entry.samples.find(s => s.timestamp === bucketTimestamp)) {
entry.samples.push(rollup);
}
const cutoff = Date.now() - (STATS_DAILY_RETENTION_DAYS * 24 * 60 * 60 * 1000);
entry.samples = entry.samples.filter(s => new Date(s.timestamp).getTime() > cutoff);
}
this.saveDailyStats();
}
/**
* Get history for a container by time range, auto-selecting the appropriate tier.
* - <= 24h → raw 10s samples
* - 1-30 days → hourly rollups
* - > 30 days → daily rollups
* @param {string} containerId
* @param {number} startTime - epoch ms
* @param {number} endTime - epoch ms
* @returns {{ tier: 'raw'|'hourly'|'daily', samples: Array, unit: string }}
*/
getHistoryByRange(containerId, startTime, endTime) {
const rangeMs = endTime - startTime;
const oneDay = 24 * 60 * 60 * 1000;
const thirtyDays = 30 * oneDay;
let tier, samples;
if (rangeMs <= oneDay) {
tier = 'raw';
const data = this.stats.get(containerId);
samples = data ? data.history.filter(s => {
const t = new Date(s.timestamp).getTime();
return t >= startTime && t <= endTime;
}) : [];
} else if (rangeMs <= thirtyDays) {
tier = 'hourly';
const data = this.hourlyHistory.get(containerId);
samples = data ? data.samples.filter(s => {
const t = new Date(s.timestamp).getTime();
return t >= startTime && t <= endTime;
}) : [];
} else {
tier = 'daily';
const data = this.dailyHistory.get(containerId);
samples = data ? data.samples.filter(s => {
const t = new Date(s.timestamp).getTime();
return t >= startTime && t <= endTime;
}) : [];
}
return { tier, samples, unit: tier === 'raw' ? '10s' : tier === 'hourly' ? '1h' : '1d' };
}
/**
* Load hourly rollups from disk
*/
loadHourlyStats() {
try {
if (fs.existsSync(STATS_HOURLY_FILE)) {
const data = JSON.parse(fs.readFileSync(STATS_HOURLY_FILE, 'utf8'));
this.hourlyHistory = new Map(Object.entries(data));
console.log(`[ResourceMonitor] Loaded hourly rollups for ${this.hourlyHistory.size} containers`);
}
} catch (error) {
console.error('[ResourceMonitor] Error loading hourly stats:', error.message);
}
}
/**
* Save hourly rollups to disk
*/
saveHourlyStats() {
try {
const data = Object.fromEntries(this.hourlyHistory);
fs.writeFileSync(STATS_HOURLY_FILE, JSON.stringify(data, null, 2));
} catch (error) {
console.error('[ResourceMonitor] Error saving hourly stats:', error.message);
}
}
/**
* Load daily rollups from disk
*/
loadDailyStats() {
try {
if (fs.existsSync(STATS_DAILY_FILE)) {
const data = JSON.parse(fs.readFileSync(STATS_DAILY_FILE, 'utf8'));
this.dailyHistory = new Map(Object.entries(data));
console.log(`[ResourceMonitor] Loaded daily rollups for ${this.dailyHistory.size} containers`);
}
} catch (error) {
console.error('[ResourceMonitor] Error loading daily stats:', error.message);
}
}
/**
* Save daily rollups to disk
*/
saveDailyStats() {
try {
const data = Object.fromEntries(this.dailyHistory);
fs.writeFileSync(STATS_DAILY_FILE, JSON.stringify(data, null, 2));
} catch (error) {
console.error('[ResourceMonitor] Error saving daily stats:', error.message);
}
}
/**
* Export stats for backup
*/
exportStats() {
return {
stats: Object.fromEntries(this.stats),
hourlyHistory: Object.fromEntries(this.hourlyHistory),
dailyHistory: Object.fromEntries(this.dailyHistory),
alerts: Object.fromEntries(this.alerts),
exportedAt: new Date().toISOString()
};
@@ -482,10 +818,18 @@ class ResourceMonitor extends EventEmitter {
if (data.stats) {
this.stats = new Map(Object.entries(data.stats));
}
if (data.hourlyHistory) {
this.hourlyHistory = new Map(Object.entries(data.hourlyHistory));
}
if (data.dailyHistory) {
this.dailyHistory = new Map(Object.entries(data.dailyHistory));
}
if (data.alerts) {
this.alerts = new Map(Object.entries(data.alerts));
}
this.saveStats();
this.saveHourlyStats();
this.saveDailyStats();
this.saveAlertConfig();
}
}

routes/backups.js

@@ -42,5 +42,115 @@ module.exports = function({ backupManager, asyncHandler }) {
success(res, { result });
}, 'backups-restore'));
// ==================== CLOUD DESTINATIONS ====================
// Test a destination (write+read+delete probe)
router.post('/backups/test-destination', asyncHandler(async (req, res) => {
const destination = req.body;
if (!destination || !destination.type) {
const { ValidationError } = require('../errors');
throw new ValidationError('destination.type is required');
}
const result = await backupManager.testDestination(destination);
success(res, result);
}, 'backups-test-destination'));
// Get cloud credentials (masked) for a provider
// Provider: dropbox | webdav | sftp
router.get('/backups/credentials/:provider', asyncHandler(async (req, res) => {
const credentialManager = require('../credential-manager');
const provider = req.params.provider;
if (!['dropbox', 'webdav', 'sftp'].includes(provider)) {
const { ValidationError } = require('../errors');
throw new ValidationError('Invalid provider');
}
const mask = (val) => val ? '***' : null;
let creds = {};
if (provider === 'dropbox') {
const token = await credentialManager.retrieve('backup.dropbox.token');
creds = { token: mask(token) };
} else if (provider === 'webdav') {
creds = {
url: (await credentialManager.retrieve('backup.webdav.url')) || null,
username: (await credentialManager.retrieve('backup.webdav.username')) || null,
password: mask(await credentialManager.retrieve('backup.webdav.password'))
};
} else if (provider === 'sftp') {
creds = {
host: (await credentialManager.retrieve('backup.sftp.host')) || null,
port: (await credentialManager.retrieve('backup.sftp.port')) || '22',
username: (await credentialManager.retrieve('backup.sftp.username')) || null,
password: mask(await credentialManager.retrieve('backup.sftp.password')),
privateKey: mask(await credentialManager.retrieve('backup.sftp.privateKey'))
};
}
success(res, { provider, credentials: creds });
}, 'backups-credentials-get'));
// Save cloud credentials for a provider
router.post('/backups/credentials/:provider', asyncHandler(async (req, res) => {
const credentialManager = require('../credential-manager');
const { ValidationError } = require('../errors');
const provider = req.params.provider;
if (!['dropbox', 'webdav', 'sftp'].includes(provider)) {
throw new ValidationError('Invalid provider');
}
const body = req.body || {};
const storeIfPresent = async (key, val) => {
if (val !== undefined && val !== null && val !== '' && val !== '***') {
await credentialManager.store(key, String(val));
}
};
if (provider === 'dropbox') {
if (!body.token || body.token === '***') {
const existing = await credentialManager.retrieve('backup.dropbox.token');
if (!existing) {
throw new ValidationError('Dropbox token required');
}
} else {
await credentialManager.store('backup.dropbox.token', body.token);
}
} else if (provider === 'webdav') {
await storeIfPresent('backup.webdav.url', body.url);
await storeIfPresent('backup.webdav.username', body.username);
await storeIfPresent('backup.webdav.password', body.password);
} else if (provider === 'sftp') {
await storeIfPresent('backup.sftp.host', body.host);
await storeIfPresent('backup.sftp.port', body.port);
await storeIfPresent('backup.sftp.username', body.username);
await storeIfPresent('backup.sftp.password', body.password);
await storeIfPresent('backup.sftp.privateKey', body.privateKey);
}
success(res, { message: `${provider} credentials saved` });
}, 'backups-credentials-set'));
// Delete cloud credentials for a provider
router.delete('/backups/credentials/:provider', asyncHandler(async (req, res) => {
const credentialManager = require('../credential-manager');
const { ValidationError } = require('../errors');
const provider = req.params.provider;
if (!['dropbox', 'webdav', 'sftp'].includes(provider)) {
throw new ValidationError('Invalid provider');
}
const keys = {
dropbox: ['backup.dropbox.token'],
webdav: ['backup.webdav.url', 'backup.webdav.username', 'backup.webdav.password'],
sftp: ['backup.sftp.host', 'backup.sftp.port', 'backup.sftp.username', 'backup.sftp.password', 'backup.sftp.privateKey']
};
for (const k of keys[provider]) {
try { await credentialManager.delete(k); } catch (_) {}
}
success(res, { message: `${provider} credentials deleted` });
}, 'backups-credentials-delete'));
return router;
};

routes/monitoring.js

@@ -31,11 +31,28 @@ module.exports = function({ resourceMonitor, docker, asyncHandler, log }) {
success(res, { stats });
}, 'monitoring-stats-container'));
// Get historical stats — supports either ?hours=24 (legacy raw) OR ?startTime=...&endTime=...
// (range mode auto-selects raw / hourly / daily tier)
router.get('/monitoring/history/:containerId', asyncHandler(async (req, res) => {
const containerId = req.params.containerId;
// Range mode (preferred)
if (req.query.startTime && req.query.endTime) {
const startTime = parseInt(req.query.startTime, 10);
const endTime = parseInt(req.query.endTime, 10);
if (Number.isNaN(startTime) || Number.isNaN(endTime) || startTime >= endTime) {
const { ValidationError } = require('../errors');
throw new ValidationError('Invalid startTime/endTime');
}
const result = resourceMonitor.getHistoryByRange(containerId, startTime, endTime);
success(res, { ...result, startTime, endTime });
return;
}
// Legacy hours-based mode (raw samples only)
const hours = parseInt(req.query.hours) || 24;
const history = resourceMonitor.getHistoricalStats(containerId, hours);
success(res, { history, hours, tier: 'raw', samples: history, unit: '10s' });
}, 'monitoring-history'));
// Get aggregated stats