Add auto-update system for DashCaddy instances

- self-updater.js: polls for new versions, downloads/verifies tarballs,
  triggers host-side rebuild via systemd path unit
- dashcaddy-update.sh + systemd units: host-side container rebuild with
  automatic rollback on health check failure
- 7 new /api/v1/system/* endpoints for version info, update check/apply,
  rollback, and update history
- Frontend: DashCaddy tab in Updates modal with version display,
  changelog, update button, rollback, and notification dot
- install.sh: updater service installation, volume mounts, env vars
- build-release.sh + webhook-handler.js: release server pipeline
  (Gitea webhook → build tarball → deploy to get.dashcaddy.net)
- Dockerfile: DASHCADDY_COMMIT build arg → VERSION file
- Version bump to 1.1.0

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-07 03:11:35 -08:00
parent 9a0abc02d1
commit ffa6966fd3
14 changed files with 1395 additions and 4 deletions

View File

@@ -0,0 +1,515 @@
/**
* DashCaddy Self-Updater
* Polls for new versions, downloads and stages updates,
* triggers host-side updater for API container rebuilds.
*
* Frontend files are updated directly (zero-downtime).
* API files require a container rebuild via the host-side systemd service.
*/
const crypto = require('crypto');
const { execSync, execFileSync } = require('child_process');
const EventEmitter = require('events');
const fs = require('fs');
const fsp = require('fs').promises;
const http = require('http');
const https = require('https');
const path = require('path');
const zlib = require('zlib');
const isWindows = process.platform === 'win32';
const DEFAULTS = {
CHECK_INTERVAL: 30 * 60 * 1000, // 30 minutes
UPDATE_URL: 'https://get.dashcaddy.net/release',
MIRROR_URL: 'https://get2.dashcaddy.net/release',
UPDATES_DIR: isWindows ? 'C:/caddy/updates' : '/app/updates',
// API_SOURCE_DIR is the HOST path — written to trigger.json for the host-side updater
API_SOURCE_DIR: isWindows ? 'C:/caddy/sites/dashcaddy-api' : '/etc/dashcaddy/sites/dashcaddy-api',
// FRONTEND_DIR is the container path — dashboard is volume-mounted at /app/dashboard
FRONTEND_DIR: isWindows ? 'C:/caddy/sites/status' : '/app/dashboard',
MAX_BACKUPS: 3,
HEALTH_TIMEOUT: 60000,
DOWNLOAD_TIMEOUT: 120000,
};
class SelfUpdater extends EventEmitter {
  /**
   * Self-updater for a DashCaddy instance.
   *
   * Polls an update server for a newer version, downloads and SHA-256-verifies
   * the release tarball, applies frontend files in place, and (Linux only)
   * writes a trigger file that a host-side systemd service watches to rebuild
   * the API container.
   *
   * Emits:
   *   'update-available' (remoteInfo)        — a newer version was found
   *   'update-progress'  ({ step, version }) — progress through applyUpdate()
   *
   * @param {object} [options] Per-field overrides for DEFAULTS: enabled,
   *   checkInterval, updateUrl, mirrorUrl, updatesDir, apiSourceDir,
   *   frontendDir, maxBackups.
   */
  constructor(options = {}) {
    super();
    this.config = {
      enabled: options.enabled !== false,
      checkInterval: parseInt(options.checkInterval || DEFAULTS.CHECK_INTERVAL, 10),
      updateUrl: options.updateUrl || DEFAULTS.UPDATE_URL,
      mirrorUrl: options.mirrorUrl || DEFAULTS.MIRROR_URL,
      updatesDir: options.updatesDir || DEFAULTS.UPDATES_DIR,
      apiSourceDir: options.apiSourceDir || DEFAULTS.API_SOURCE_DIR,
      frontendDir: options.frontendDir || DEFAULTS.FRONTEND_DIR,
      maxBackups: parseInt(options.maxBackups || DEFAULTS.MAX_BACKUPS, 10),
    };
    this.status = 'idle'; // idle | checking | downloading | applying | waiting
    this.checkTimer = null;   // interval handle for the periodic check loop
    this.startupTimer = null; // handle for the delayed first check (see start())
    this.lastCheckTime = null;
    this.lastCheckResult = null;
    // Fire-and-forget: directory-creation errors are swallowed inside.
    this._ensureDirs();
  }

  // ── Lifecycle ──

  /**
   * Begin periodic update checks. The first check runs after a 15 s delay so
   * the server can finish starting up. Idempotent: calling start() again while
   * the startup delay or the interval is active is a no-op.
   *
   * FIX: the original only guarded on checkTimer, which is not set until the
   * startup delay fires — two start() calls within 15 s scheduled two loops,
   * and stop() could not cancel a pending first check.
   */
  start() {
    if (!this.config.enabled || this.checkTimer || this.startupTimer) return;
    console.log('[SelfUpdater] Starting auto-update checks every %ds', this.config.checkInterval / 1000);
    this.startupTimer = setTimeout(() => {
      this.startupTimer = null;
      this._autoCheckAndApply();
      this.checkTimer = setInterval(() => this._autoCheckAndApply(), this.config.checkInterval);
    }, 15000);
  }

  /** Cancel the pending first check and/or the periodic check loop. */
  stop() {
    if (this.startupTimer) {
      clearTimeout(this.startupTimer);
      this.startupTimer = null;
    }
    if (this.checkTimer) {
      clearInterval(this.checkTimer);
      this.checkTimer = null;
    }
  }

  // ── Version Info ──

  /**
   * Read the locally installed version from package.json, plus the build
   * commit from the VERSION file if present (written by the Docker build).
   * @returns {{version: string, commit: string|null}} Falls back to
   *   '0.0.0' / null when the files are unreadable.
   */
  getLocalVersion() {
    try {
      const pkg = JSON.parse(fs.readFileSync(path.join(__dirname, 'package.json'), 'utf8'));
      let commit = null;
      try {
        commit = fs.readFileSync(path.join(__dirname, 'VERSION'), 'utf8').trim();
      } catch (_) {}
      return { version: pkg.version, commit };
    } catch (e) {
      return { version: '0.0.0', commit: null };
    }
  }

  /** @returns {string} Current state: idle | checking | downloading | applying | waiting */
  getStatus() {
    return this.status;
  }

  // ── Check for Updates ──

  /**
   * Fetch version.json from the primary server (falling back to the mirror)
   * and compare against the local version.
   * @returns {Promise<{available: boolean, local?: object, remote?: object, error?: string}>}
   *   Never rejects; failures are reported via the `error` field.
   */
  async checkForUpdate() {
    this.status = 'checking';
    try {
      let remote;
      try {
        remote = await this._fetchJson(`${this.config.updateUrl}/version.json`);
      } catch (primaryErr) {
        console.warn('[SelfUpdater] Primary server failed:', primaryErr.message, '— trying mirror');
        try {
          remote = await this._fetchJson(`${this.config.mirrorUrl}/version.json`);
        } catch (mirrorErr) {
          this.status = 'idle';
          this.lastCheckTime = Date.now();
          this.lastCheckResult = { available: false, error: 'Update servers unreachable' };
          return this.lastCheckResult;
        }
      }
      const local = this.getLocalVersion();
      const available = this._isNewer(local, remote);
      this.lastCheckTime = Date.now();
      this.lastCheckResult = { available, local, remote };
      this.status = 'idle';
      if (available) {
        this.emit('update-available', remote);
      }
      return this.lastCheckResult;
    } catch (e) {
      this.status = 'idle';
      this.lastCheckTime = Date.now();
      this.lastCheckResult = { available: false, error: e.message };
      return this.lastCheckResult;
    }
  }

  // ── Apply Update ──

  /**
   * Download, verify, and apply an update described by remoteInfo
   * ({ version, commit, tarball, sha256 } — as served in version.json).
   *
   * Frontend files are copied directly into frontendDir (zero-downtime).
   * On Linux, API files are staged and a trigger.json is written for the
   * host-side systemd service, which rebuilds the container; the result is
   * picked up after restart by checkPostUpdateResult(). On Windows the API
   * part is skipped and recorded as a partial update.
   *
   * @returns {Promise<{success: boolean, fromVersion: string, toVersion: string,
   *   frontendUpdated: boolean, apiUpdated: boolean}>}
   * @throws {Error} on concurrent updates, download failure, hash mismatch,
   *   or extraction failure (the failure is also recorded in history).
   */
  async applyUpdate(remoteInfo) {
    if (this.status !== 'idle' && this.status !== 'checking') {
      throw new Error(`Update already in progress (status: ${this.status})`);
    }
    const local = this.getLocalVersion();
    const stagingDir = path.join(this.config.updatesDir, 'staging');
    try {
      // 1. Download.
      this.status = 'downloading';
      this.emit('update-progress', { step: 'downloading', version: remoteInfo.version });
      // Remote metadata is untrusted: strip any path components from the
      // tarball name so a malicious version.json cannot escape updatesDir.
      const tarballName = path.basename(remoteInfo.tarball);
      const tarballUrl = `${this.config.updateUrl}/${tarballName}`;
      const tarballPath = path.join(this.config.updatesDir, tarballName);
      await this._downloadFile(tarballUrl, tarballPath);

      // 2. Verify SHA-256 before touching anything else.
      const hash = await this._computeSha256(tarballPath);
      if (hash !== remoteInfo.sha256) {
        await fsp.unlink(tarballPath).catch(() => {});
        throw new Error(`SHA-256 mismatch: expected ${remoteInfo.sha256}, got ${hash}`);
      }

      // 3. Extract into a clean staging directory.
      this.status = 'applying';
      this.emit('update-progress', { step: 'extracting', version: remoteInfo.version });
      await this._cleanDir(stagingDir);
      await this._extractTarball(tarballPath, stagingDir);

      // 4. Apply frontend files directly (zero-downtime).
      const frontendSrc = this._findDir(stagingDir, 'status');
      if (frontendSrc) {
        await this._copyDir(frontendSrc, this.config.frontendDir, [
          'dist', 'css', 'assets', 'vendor', 'index.html', 'sw.js'
        ]);
        this.emit('update-progress', { step: 'frontend-updated', version: remoteInfo.version });
      }

      // 5. Trigger API rebuild (Linux only — host-side systemd service).
      const apiSrc = this._findDir(stagingDir, 'dashcaddy-api');
      if (apiSrc && !isWindows) {
        this.status = 'waiting';
        this.emit('update-progress', { step: 'triggering-rebuild', version: remoteInfo.version });
        const trigger = {
          action: 'update',
          version: remoteInfo.version,
          commit: remoteInfo.commit,
          fromVersion: local.version,
          stagingDir: apiSrc,
          apiSourceDir: this.config.apiSourceDir,
          timestamp: new Date().toISOString(),
        };
        await fsp.writeFile(
          path.join(this.config.updatesDir, 'trigger.json'),
          JSON.stringify(trigger, null, 2)
        );
        // The host-side systemd service will handle the rest.
        // After container restart, checkPostUpdateResult() reads the result.
        this._addToHistory({
          version: remoteInfo.version,
          fromVersion: local.version,
          timestamp: new Date().toISOString(),
          status: 'pending',
          frontendUpdated: !!frontendSrc,
          apiUpdated: true,
        });
      } else if (isWindows) {
        // Windows: frontend updated, API needs manual restart.
        this._addToHistory({
          version: remoteInfo.version,
          fromVersion: local.version,
          timestamp: new Date().toISOString(),
          status: 'partial',
          frontendUpdated: !!frontendSrc,
          apiUpdated: false,
          note: 'API update requires manual container restart on Windows',
        });
        this.status = 'idle';
      } else {
        // FIX: Linux update with a frontend-only payload. The original left
        // status stuck at 'applying' forever here, blocking all future updates.
        this._addToHistory({
          version: remoteInfo.version,
          fromVersion: local.version,
          timestamp: new Date().toISOString(),
          status: 'success',
          frontendUpdated: !!frontendSrc,
          apiUpdated: false,
        });
        this.status = 'idle';
      }

      // Clean up tarball.
      await fsp.unlink(tarballPath).catch(() => {});
      return {
        success: true,
        fromVersion: local.version,
        toVersion: remoteInfo.version,
        frontendUpdated: !!frontendSrc,
        apiUpdated: !isWindows && !!apiSrc,
      };
    } catch (e) {
      this.status = 'idle';
      this._addToHistory({
        version: remoteInfo.version,
        fromVersion: local.version,
        timestamp: new Date().toISOString(),
        status: 'failed',
        error: e.message,
      });
      throw e;
    }
  }

  // ── Post-Update Result ──

  /**
   * After a container restart, read result.json (written by the host-side
   * updater), reconcile the pending history entry, and reset status.
   * @returns {Promise<object|null>} The parsed result, or null if none exists.
   */
  async checkPostUpdateResult() {
    const resultPath = path.join(this.config.updatesDir, 'result.json');
    try {
      const data = await fsp.readFile(resultPath, 'utf8');
      const result = JSON.parse(data);
      // Delete the result file so we don't process it again.
      await fsp.unlink(resultPath).catch(() => {});
      // Update history: resolve the most recent pending entry.
      const history = this.getUpdateHistory();
      const pending = history.find(h => h.status === 'pending');
      if (pending) {
        pending.status = result.success ? 'success' : 'rolled-back';
        pending.duration = result.duration;
        if (result.error) pending.error = result.error;
        this._saveHistory(history);
      }
      this.status = 'idle';
      return result;
    } catch (_) {
      return null;
    }
  }

  // ── Rollback ──

  /**
   * Request a rollback to a previously backed-up version by writing a
   * rollback trigger for the host-side updater. Linux only.
   * @param {string} version - a directory name under updates/backups/
   * @throws {Error} on Windows, or when no backup exists for the version.
   */
  async rollbackToVersion(version) {
    if (isWindows) throw new Error('Auto-rollback not supported on Windows');
    const backupDir = path.join(this.config.updatesDir, 'backups', version);
    try {
      await fsp.access(backupDir);
    } catch (_) {
      throw new Error(`No backup found for version ${version}`);
    }
    const local = this.getLocalVersion();
    const trigger = {
      action: 'rollback',
      version: version,
      fromVersion: local.version,
      stagingDir: backupDir,
      apiSourceDir: this.config.apiSourceDir,
      timestamp: new Date().toISOString(),
    };
    this.status = 'waiting';
    await fsp.writeFile(
      path.join(this.config.updatesDir, 'trigger.json'),
      JSON.stringify(trigger, null, 2)
    );
    this._addToHistory({
      version: version,
      fromVersion: local.version,
      timestamp: new Date().toISOString(),
      status: 'pending',
      rollback: true,
    });
  }

  /**
   * List backup versions available for rollback, newest first.
   * FIX: the original sorted lexicographically, which orders '9.0.0' after
   * '10.0.0'; this uses a numeric semver comparison instead.
   * @returns {string[]} version directory names, descending.
   */
  getAvailableRollbacks() {
    const backupsDir = path.join(this.config.updatesDir, 'backups');
    const bySemver = (a, b) => {
      const pa = a.split('.').map(Number);
      const pb = b.split('.').map(Number);
      for (let i = 0; i < 3; i++) {
        // NaN (non-numeric segment) coerces to 0 so odd names still sort stably.
        const d = (pa[i] || 0) - (pb[i] || 0);
        if (d !== 0) return d;
      }
      return a.localeCompare(b);
    };
    try {
      return fs.readdirSync(backupsDir)
        .filter(d => fs.statSync(path.join(backupsDir, d)).isDirectory())
        .sort(bySemver)
        .reverse();
    } catch (_) {
      return [];
    }
  }

  // ── History ──

  /**
   * Read the persisted update history (newest first).
   * @returns {object[]} empty array when the file is missing or unreadable.
   */
  getUpdateHistory() {
    const historyPath = path.join(this.config.updatesDir, 'self-update-history.json');
    try {
      return JSON.parse(fs.readFileSync(historyPath, 'utf8'));
    } catch (_) {
      return [];
    }
  }

  // ── Private Methods ──

  /** Periodic driver: check, and auto-apply when an update is available. */
  async _autoCheckAndApply() {
    try {
      const result = await this.checkForUpdate();
      if (result.available && result.remote) {
        console.log('[SelfUpdater] Update available: %s → %s', result.local.version, result.remote.version);
        await this.applyUpdate(result.remote);
      }
    } catch (e) {
      console.error('[SelfUpdater] Auto-update error:', e.message);
    }
  }

  /**
   * Compare semver major.minor.patch; when versions are equal, a differing
   * commit hash also counts as newer (same-version rebuilds).
   * @returns {boolean} true when remote is newer than local.
   */
  _isNewer(local, remote) {
    if (!remote || !remote.version) return false;
    const lv = (local.version || '0.0.0').split('.').map(Number);
    const rv = remote.version.split('.').map(Number);
    for (let i = 0; i < 3; i++) {
      if ((rv[i] || 0) > (lv[i] || 0)) return true;
      if ((rv[i] || 0) < (lv[i] || 0)) return false;
    }
    // Same version — check commit hash.
    if (remote.commit && local.commit && remote.commit !== local.commit) return true;
    return false;
  }

  /** Prepend an entry to the persisted history, capped at 50 entries. */
  _addToHistory(entry) {
    const history = this.getUpdateHistory();
    history.unshift(entry);
    if (history.length > 50) history.length = 50;
    this._saveHistory(history);
  }

  /** Persist the history array; failures are logged, never thrown. */
  _saveHistory(history) {
    const historyPath = path.join(this.config.updatesDir, 'self-update-history.json');
    try {
      fs.writeFileSync(historyPath, JSON.stringify(history, null, 2));
    } catch (e) {
      console.error('[SelfUpdater] Failed to save history:', e.message);
    }
  }

  /** Best-effort creation of the updates/staging/backups directories. */
  async _ensureDirs() {
    const dirs = [
      this.config.updatesDir,
      path.join(this.config.updatesDir, 'staging'),
      path.join(this.config.updatesDir, 'backups'),
    ];
    for (const dir of dirs) {
      await fsp.mkdir(dir, { recursive: true }).catch(() => {});
    }
  }

  /**
   * GET a URL and parse the body as JSON. 15 s timeout.
   * @throws {Error} on non-200 status, network error, timeout, or bad JSON.
   */
  async _fetchJson(url) {
    return new Promise((resolve, reject) => {
      const mod = url.startsWith('https') ? https : http;
      const req = mod.get(url, { timeout: 15000 }, (res) => {
        if (res.statusCode !== 200) {
          res.resume(); // drain so the socket is released
          return reject(new Error(`HTTP ${res.statusCode} from ${url}`));
        }
        let data = '';
        res.on('data', chunk => data += chunk);
        res.on('end', () => {
          try {
            resolve(JSON.parse(data));
          } catch (e) {
            reject(new Error('Invalid JSON from ' + url));
          }
        });
      });
      req.on('error', reject);
      req.on('timeout', () => { req.destroy(); reject(new Error('Timeout fetching ' + url)); });
    });
  }

  /**
   * Stream a URL to a local file, cleaning up the partial file on failure.
   * FIX: the original never handled the write stream's 'error' event, so a
   * disk-write failure (e.g. disk full) left the promise pending forever.
   */
  async _downloadFile(url, dest) {
    return new Promise((resolve, reject) => {
      const mod = url.startsWith('https') ? https : http;
      const file = fs.createWriteStream(dest);
      const fail = (err) => {
        file.close();
        fs.unlink(dest, () => {});
        reject(err);
      };
      file.on('error', fail);
      const req = mod.get(url, { timeout: DEFAULTS.DOWNLOAD_TIMEOUT }, (res) => {
        if (res.statusCode !== 200) {
          res.resume();
          return fail(new Error(`HTTP ${res.statusCode} downloading ${url}`));
        }
        res.pipe(file);
        file.on('finish', () => { file.close(resolve); });
      });
      req.on('error', fail);
      req.on('timeout', () => { req.destroy(); fail(new Error('Download timeout')); });
    });
  }

  /** @returns {Promise<string>} lowercase hex SHA-256 of the file contents. */
  async _computeSha256(filePath) {
    return new Promise((resolve, reject) => {
      const hash = crypto.createHash('sha256');
      const stream = fs.createReadStream(filePath);
      stream.on('data', chunk => hash.update(chunk));
      stream.on('end', () => resolve(hash.digest('hex')));
      stream.on('error', reject);
    });
  }

  /**
   * Extract a .tar.gz into destDir, dropping the top-level directory.
   * FIX: uses execFileSync with an argv array instead of a shell-interpolated
   * execSync string — the tarball name originates from remote metadata, so it
   * must never pass through a shell.
   */
  async _extractTarball(tarballPath, destDir) {
    await fsp.mkdir(destDir, { recursive: true });
    try {
      // tar is available on Linux, and via Git Bash on Windows.
      execFileSync('tar', ['xzf', tarballPath, '-C', destDir, '--strip-components=1'], { stdio: 'pipe' });
    } catch (e) {
      throw new Error('Failed to extract tarball: ' + e.message);
    }
  }

  /**
   * Find a directory named `name` directly under baseDir, or one level deeper
   * (e.g. staging/dashcaddy/dashcaddy-api).
   * @returns {string|null} absolute path, or null when not found.
   */
  _findDir(baseDir, name) {
    const direct = path.join(baseDir, name);
    if (fs.existsSync(direct)) return direct;
    try {
      for (const entry of fs.readdirSync(baseDir)) {
        const sub = path.join(baseDir, entry, name);
        if (fs.existsSync(sub)) return sub;
      }
    } catch (_) {}
    return null;
  }

  /**
   * Copy only the named items (files or directories) from src to dest.
   * Items missing from the update payload are silently skipped.
   */
  async _copyDir(src, dest, items) {
    await fsp.mkdir(dest, { recursive: true });
    for (const item of items) {
      const srcPath = path.join(src, item);
      const destPath = path.join(dest, item);
      try {
        const stat = await fsp.stat(srcPath);
        if (stat.isDirectory()) {
          await this._copyDirRecursive(srcPath, destPath);
        } else {
          await fsp.copyFile(srcPath, destPath);
        }
      } catch (_) {
        // Item may not exist in the update — skip.
      }
    }
  }

  /** Recursively copy a directory tree (files and subdirectories only). */
  async _copyDirRecursive(src, dest) {
    await fsp.mkdir(dest, { recursive: true });
    const entries = await fsp.readdir(src, { withFileTypes: true });
    for (const entry of entries) {
      const srcPath = path.join(src, entry.name);
      const destPath = path.join(dest, entry.name);
      if (entry.isDirectory()) {
        await this._copyDirRecursive(srcPath, destPath);
      } else {
        await fsp.copyFile(srcPath, destPath);
      }
    }
  }

  /** Remove a directory tree (if present) and recreate it empty. */
  async _cleanDir(dir) {
    try {
      await fsp.rm(dir, { recursive: true, force: true });
    } catch (_) {}
    await fsp.mkdir(dir, { recursive: true });
  }
}
// Singleton
// One shared updater instance for the whole process, configured from
// environment variables. Updates are on by default and disabled only when
// DASHCADDY_UPDATE_ENABLED is the literal string 'false'; every other option
// falls back to its DEFAULTS entry when the corresponding env var is unset.
const selfUpdater = new SelfUpdater({
enabled: process.env.DASHCADDY_UPDATE_ENABLED !== 'false',
checkInterval: process.env.DASHCADDY_UPDATE_INTERVAL,
updateUrl: process.env.DASHCADDY_UPDATE_URL,
mirrorUrl: process.env.DASHCADDY_MIRROR_URL,
updatesDir: process.env.DASHCADDY_UPDATES_DIR,
apiSourceDir: process.env.DASHCADDY_API_SOURCE_DIR,
frontendDir: process.env.DASHCADDY_FRONTEND_DIR,
});
// Note: start() is not called here — the importer decides when polling begins.
module.exports = selfUpdater;