Add auto-update system for DashCaddy instances

- self-updater.js: polls for new versions, downloads/verifies tarballs,
  triggers host-side rebuild via systemd path unit
- dashcaddy-update.sh + systemd units: host-side container rebuild with
  automatic rollback on health check failure
- 7 new /api/v1/system/* endpoints for version info, update check/apply,
  rollback, and update history
- Frontend: DashCaddy tab in Updates modal with version display,
  changelog, update button, rollback, and notification dot
- install.sh: updater service installation, volume mounts, env vars
- build-release.sh + webhook-handler.js: release server pipeline
  (Gitea webhook → build tarball → deploy to get.dashcaddy.net)
- Dockerfile: DASHCADDY_COMMIT build arg → VERSION file
- Version bump to 1.1.0

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-07 03:11:35 -08:00
parent 9a0abc02d1
commit ffa6966fd3
14 changed files with 1395 additions and 4 deletions

View File

@@ -12,6 +12,9 @@ COPY *.js ./
COPY routes/ ./routes/ COPY routes/ ./routes/
COPY openapi.yaml ./ COPY openapi.yaml ./
ARG DASHCADDY_COMMIT=unknown
RUN echo "${DASHCADDY_COMMIT}" > VERSION
# Note: Running as root because container needs Docker socket access # Note: Running as root because container needs Docker socket access
# (which is root-equivalent anyway). Socket access required for container management. # (which is root-equivalent anyway). Socket access required for container management.

View File

@@ -5,7 +5,7 @@
// ── App Identity ────────────────────────────────────────────── // ── App Identity ──────────────────────────────────────────────
const APP = { const APP = {
NAME: 'DashCaddy', NAME: 'DashCaddy',
VERSION: '1.0', VERSION: '1.1',
PORT: 3001, PORT: 3001,
USER_AGENTS: { USER_AGENTS: {
PROBE: 'DashCaddy-Probe/1.0', PROBE: 'DashCaddy-Probe/1.0',

View File

@@ -1,6 +1,6 @@
{ {
"name": "dashcaddy-api", "name": "dashcaddy-api",
"version": "1.0.0", "version": "1.1.0",
"description": "DashCaddy API server - Dashboard backend for Docker, Caddy & DNS management", "description": "DashCaddy API server - Dashboard backend for Docker, Caddy & DNS management",
"main": "server.js", "main": "server.js",
"scripts": { "scripts": {

View File

@@ -95,6 +95,7 @@ const ctx = {
resourceMonitor: null, resourceMonitor: null,
auditLogger: null, auditLogger: null,
portLockManager: null, portLockManager: null,
selfUpdater: null,
// Templates // Templates
APP_TEMPLATES: null, APP_TEMPLATES: null,

View File

@@ -59,5 +59,69 @@ module.exports = function(ctx) {
res.json({ success: true, message: 'Update scheduled', scheduledTime }); res.json({ success: true, message: 'Update scheduled', scheduledTime });
}, 'updates-schedule')); }, 'updates-schedule'));
// ===== DASHCADDY SELF-UPDATE ENDPOINTS =====

// Get current version (package.json version + build-time commit hash)
router.get('/system/version', ctx.asyncHandler(async (req, res) => {
  const local = ctx.selfUpdater.getLocalVersion();
  res.json({ success: true, name: 'DashCaddy', version: local.version, commit: local.commit });
}, 'system-version'));

// Check for DashCaddy update
router.get('/system/update-check', ctx.asyncHandler(async (req, res) => {
  const result = await ctx.selfUpdater.checkForUpdate();
  res.json({ success: true, ...result });
}, 'system-update-check'));

// Apply available update
router.post('/system/update-apply', ctx.asyncHandler(async (req, res) => {
  const check = await ctx.selfUpdater.checkForUpdate();
  if (!check.available) {
    return res.json({ success: true, message: 'Already up to date' });
  }
  // Start async — container may restart before the update completes, so
  // respond immediately; progress is reported via /system/update-status.
  ctx.selfUpdater.applyUpdate(check.remote).catch(err => {
    ctx.logError('self-update', err);
  });
  res.json({
    success: true,
    message: 'Update initiated',
    fromVersion: check.local.version,
    toVersion: check.remote.version,
  });
}, 'system-update-apply'));

// Get update status
router.get('/system/update-status', ctx.asyncHandler(async (req, res) => {
  res.json({
    success: true,
    status: ctx.selfUpdater.getStatus(),
    lastCheck: ctx.selfUpdater.lastCheckTime,
    lastResult: ctx.selfUpdater.lastCheckResult,
  });
}, 'system-update-status'));

// Get self-update history
router.get('/system/update-history', ctx.asyncHandler(async (req, res) => {
  const history = ctx.selfUpdater.getUpdateHistory();
  res.json({ success: true, history });
}, 'system-update-history'));

// List rollback versions
router.get('/system/rollback-versions', ctx.asyncHandler(async (req, res) => {
  const versions = ctx.selfUpdater.getAvailableRollbacks();
  res.json({ success: true, versions });
}, 'system-rollback-versions'));

// Rollback to a previous version
router.post('/system/rollback', ctx.asyncHandler(async (req, res) => {
  const { version } = req.body;
  if (!version) return ctx.errorResponse(res, 400, 'version is required');
  // The version string ends up in a filesystem path (updates/backups/<version>)
  // on both the container and the host side — reject anything that could
  // traverse out of the backups directory (e.g. "../../etc").
  if (typeof version !== 'string' || !/^[A-Za-z0-9][A-Za-z0-9._-]*$/.test(version)) {
    return ctx.errorResponse(res, 400, 'invalid version format');
  }
  ctx.selfUpdater.rollbackToVersion(version).catch(err => {
    ctx.logError('self-rollback', err);
  });
  res.json({ success: true, message: `Rollback to ${version} initiated` });
}, 'system-rollback'));
return router; return router;
}; };

View File

@@ -0,0 +1,111 @@
#!/usr/bin/env bash
# DashCaddy Release Builder
# Triggered by Gitea webhook on push to main.
# Clones repo, builds tarball, writes version.json, deploys to web root.
set -euo pipefail

readonly REPO_URL="http://100.98.123.59:3000/sami7777/dashcaddy.git"
readonly RELEASE_DIR="/var/www/get.dashcaddy.net/release"
readonly BUILD_DIR="/tmp/dashcaddy-build-$$"   # per-run scratch dir ($$ = PID)
readonly MIRROR_HOST="root@100.98.123.59"      # Contabo DE
readonly BRANCH="main"

log() { echo "[build-release] $(date '+%Y-%m-%d %H:%M:%S') $*"; }
cleanup() { rm -rf "$BUILD_DIR"; }
trap cleanup EXIT

main() {
  log "=== Starting release build ==="

  # 1. Clone latest
  mkdir -p "$BUILD_DIR"
  log "Cloning ${BRANCH}..."
  git clone --depth 1 --branch "$BRANCH" "$REPO_URL" "$BUILD_DIR/repo" 2>&1
  cd "$BUILD_DIR/repo"
  local commit
  commit=$(git rev-parse --short HEAD)
  log "Commit: ${commit}"

  # 2. Read version from package.json
  local version
  version=$(python3 -c "import json; print(json.load(open('dashcaddy-api/package.json'))['version'])")
  log "Version: ${version}"

  # 3. Build changelog (last 10 commits, one-liner)
  local changelog
  changelog=$(git log --oneline -10 --no-decorate 2>/dev/null || echo "${commit} (no log)")

  # 4. Assemble tarball contents (optional files copied best-effort)
  local staging="$BUILD_DIR/dashcaddy"
  mkdir -p "$staging/dashcaddy-api/routes" "$staging/status" "$staging/scripts"
  # API files
  cp -f dashcaddy-api/*.js "$staging/dashcaddy-api/" 2>/dev/null || true
  cp -rf dashcaddy-api/routes/* "$staging/dashcaddy-api/routes/" 2>/dev/null || true
  cp -f dashcaddy-api/package.json "$staging/dashcaddy-api/"
  cp -f dashcaddy-api/package-lock.json "$staging/dashcaddy-api/" 2>/dev/null || true
  cp -f dashcaddy-api/Dockerfile "$staging/dashcaddy-api/"
  cp -f dashcaddy-api/openapi.yaml "$staging/dashcaddy-api/" 2>/dev/null || true
  # Dashboard files
  cp -f status/index.html "$staging/status/"
  cp -f status/sw.js "$staging/status/" 2>/dev/null || true
  for dir in css js dist vendor assets; do
    [ -d "status/${dir}" ] && cp -rf "status/${dir}" "$staging/status/"
  done
  # Updater scripts
  cp -f dashcaddy-api/scripts/dashcaddy-update.sh "$staging/scripts/" 2>/dev/null || true
  cp -f dashcaddy-api/scripts/dashcaddy-updater.path "$staging/scripts/" 2>/dev/null || true
  cp -f dashcaddy-api/scripts/dashcaddy-updater.service "$staging/scripts/" 2>/dev/null || true

  # 5. Create tarball
  local tarball="dashcaddy-${version}.tar.gz"
  cd "$BUILD_DIR"
  tar czf "$tarball" dashcaddy/
  log "Tarball: ${tarball} ($(du -h "$tarball" | cut -f1))"

  # 6. Compute SHA-256 (verified by the self-updater before applying)
  local sha256
  sha256=$(sha256sum "$tarball" | cut -d' ' -f1)
  log "SHA-256: ${sha256}"

  # 7. Write version.json.
  # Encode the changelog by passing it to python through the environment so
  # that quotes, backslashes, or triple-quotes in commit messages can neither
  # break the JSON nor inject into the python expression (the previous inline
  # '''${changelog}''' interpolation did both).
  local changelog_json
  changelog_json=$(CHANGELOG="$changelog" python3 -c "import json, os; print(json.dumps(os.environ['CHANGELOG']))")
  cat > version.json <<EOF
{
  "version": "${version}",
  "commit": "${commit}",
  "date": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
  "sha256": "${sha256}",
  "changelog": ${changelog_json},
  "breaking": false,
  "tarball": "${tarball}"
}
EOF

  # 8. Deploy to web root
  mkdir -p "$RELEASE_DIR"
  cp -f "$tarball" "$RELEASE_DIR/"
  cp -f version.json "$RELEASE_DIR/"
  # Also keep a "latest" symlink/copy
  cp -f "$tarball" "$RELEASE_DIR/latest.tar.gz"
  log "Deployed to ${RELEASE_DIR}"

  # 9. Sync to mirror (Contabo DE) — best-effort, never fails the build
  if ssh -o ConnectTimeout=5 "$MIRROR_HOST" true 2>/dev/null; then
    log "Syncing to mirror..."
    rsync -az --timeout=30 "$RELEASE_DIR/" "$MIRROR_HOST:/var/www/get2.dashcaddy.net/release/" 2>&1 || {
      log "WARNING: Mirror sync failed (non-fatal)"
    }
    log "Mirror synced"
  else
    log "WARNING: Mirror host unreachable, skipping sync"
  fi

  log "=== Release build complete: v${version} (${commit}) ==="
}

main "$@"

View File

@@ -0,0 +1,219 @@
#!/usr/bin/env bash
# DashCaddy Host-Side Updater
# Triggered by systemd path unit when the API container writes trigger.json.
# Handles API container rebuild + restart with automatic rollback on failure.
set -euo pipefail
# Shared state directory (volume-mounted into the API container)
readonly UPDATES_DIR="/opt/dashcaddy/updates"
# Written by the container's self-updater to request an update/rollback
readonly TRIGGER_FILE="${UPDATES_DIR}/trigger.json"
# Written by this script so the (restarted) container can report the outcome
readonly RESULT_FILE="${UPDATES_DIR}/result.json"
readonly BACKUP_BASE="${UPDATES_DIR}/backups"
readonly HEALTH_URL="http://localhost:3001/health"
readonly HEALTH_TIMEOUT=60 # seconds to wait for the API to become healthy
readonly MAX_BACKUPS=3 # number of version backups kept on disk
log() { echo "[dashcaddy-update] $(date '+%Y-%m-%d %H:%M:%S') $*"; }
# write_result <true|false> <message> <version> <duration-ms>
# Emits RESULT_FILE as JSON for the container to pick up after restart.
# NOTE(review): message/version are interpolated into the JSON unescaped —
# callers must pass strings without quotes/backslashes (all current call
# sites use fixed messages, so this holds today).
write_result() {
local success="$1" message="$2" version="$3" duration="$4"
cat > "$RESULT_FILE" <<EOF
{
"success": ${success},
"version": "${version}",
"message": "${message}",
"duration": ${duration},
"timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
}
EOF
log "Result written: success=${success} version=${version}"
}
# Snapshot the currently-deployed API source so a failed update can revert.
# $1 = live API source dir, $2 = version label for the backup directory.
backup_current() {
  local api_dir="$1" version="$2"
  local backup_dir="${BACKUP_BASE}/${version}"
  mkdir -p "$backup_dir/routes"
  # All copies are best-effort: optional files may be absent.
  cp -f "${api_dir}"/*.js "$backup_dir/" 2>/dev/null || true
  cp -rf "${api_dir}/routes/"* "$backup_dir/routes/" 2>/dev/null || true
  local item
  for item in package.json package-lock.json Dockerfile openapi.yaml; do
    cp -f "${api_dir}/${item}" "$backup_dir/" 2>/dev/null || true
  done
  log "Backed up version ${version} to ${backup_dir}"
}
# Put a previously backed-up version's files back into the live source dir.
# $1 = live API source dir, $2 = version label to restore. Returns 1 if no
# backup exists for that version.
restore_backup() {
  local api_dir="$1" version="$2"
  local backup_dir="${BACKUP_BASE}/${version}"
  if [ ! -d "$backup_dir" ]; then
    log "ERROR: No backup found for version ${version}"
    return 1
  fi
  # Best-effort copies mirror backup_current.
  cp -f "${backup_dir}"/*.js "$api_dir/" 2>/dev/null || true
  cp -rf "${backup_dir}/routes/"* "${api_dir}/routes/" 2>/dev/null || true
  local item
  for item in package.json package-lock.json Dockerfile openapi.yaml; do
    cp -f "${backup_dir}/${item}" "$api_dir/" 2>/dev/null || true
  done
  log "Restored version ${version} from ${backup_dir}"
}
# Overlay the freshly-extracted update files onto the live API source dir.
# $1 = staging dir (extracted tarball), $2 = live API source dir.
copy_new_files() {
  local staging_dir="$1" api_dir="$2"
  cp -f "${staging_dir}"/*.js "$api_dir/" 2>/dev/null || true
  if [ -d "${staging_dir}/routes" ]; then
    cp -rf "${staging_dir}/routes/"* "${api_dir}/routes/" 2>/dev/null || true
  fi
  local item
  for item in package.json package-lock.json Dockerfile openapi.yaml; do
    cp -f "${staging_dir}/${item}" "$api_dir/" 2>/dev/null || true
  done
  log "Copied new files from ${staging_dir} to ${api_dir}"
}
# Poll the API health endpoint every 2s until it answers or HEALTH_TIMEOUT
# seconds elapse. Returns 0 on success, 1 on timeout.
wait_for_health() {
  local max_attempts=$((HEALTH_TIMEOUT / 2))
  local attempt
  for (( attempt = 1; attempt <= max_attempts; attempt++ )); do
    if curl -fsS --max-time 3 "$HEALTH_URL" >/dev/null 2>&1; then
      log "Health check passed (attempt ${attempt})"
      return 0
    fi
    sleep 2
  done
  log "Health check FAILED after ${HEALTH_TIMEOUT}s"
  return 1
}
# Locate the docker-compose project directory for the dashcaddy-api
# container; falls back to the API source dir passed as $1.
find_compose_dir() {
  local candidate
  for candidate in /etc/dashcaddy/sites/dashcaddy-api /etc/dashcaddy/sites/caddy-api; do
    if [ -f "${candidate}/docker-compose.yml" ] || [ -f "${candidate}/docker-compose.yaml" ]; then
      echo "$candidate"
      return 0
    fi
  done
  echo "$1"
}
# Keep only the MAX_BACKUPS newest backup directories, deleting the oldest.
# Uses version-aware ordering (sort -V) so that e.g. 1.10.0 sorts after
# 1.9.0; the previous plain `ls` lexical order would have treated 1.10.0 as
# older than 1.9.0 and deleted the wrong backups.
cleanup_old_backups() {
  if [ ! -d "$BACKUP_BASE" ]; then return; fi
  local count
  count=$(ls -1d "${BACKUP_BASE}"/*/ 2>/dev/null | wc -l)
  if (( count > MAX_BACKUPS )); then
    local to_remove=$((count - MAX_BACKUPS))
    ls -1d "${BACKUP_BASE}"/*/ 2>/dev/null | sort -V | head -n "$to_remove" | while read -r dir; do
      rm -rf "$dir"
      log "Cleaned old backup: $dir"
    done
  fi
}
# Orchestrates one update/rollback cycle: parse trigger → backup → copy →
# build → restart → health check, with rollback on any failure. Every exit
# path writes RESULT_FILE and removes TRIGGER_FILE so the path unit does
# not re-fire on the same request.
main() {
# Invoked on every PathChanged event; a run without a trigger file (e.g.
# re-trigger after we delete it below) is a no-op.
if [ ! -f "$TRIGGER_FILE" ]; then
log "No trigger file found, exiting"
exit 0
fi
local start_time
start_time=$(date +%s)
# Parse trigger file (each field read best-effort with a safe default)
local action version from_version staging_dir api_dir commit
action=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('action','update'))" 2>/dev/null || echo "update")
version=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('version','unknown'))" 2>/dev/null || echo "unknown")
from_version=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('fromVersion','unknown'))" 2>/dev/null || echo "unknown")
staging_dir=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('stagingDir',''))" 2>/dev/null || echo "")
api_dir=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('apiSourceDir','/opt/dashcaddy'))" 2>/dev/null || echo "/opt/dashcaddy")
commit=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('commit','unknown'))" 2>/dev/null || echo "unknown")
# NOTE(review): a separator between the two versions appears to have been
# lost (likely an arrow character) — confirm against the original source.
log "=== DashCaddy ${action} started: ${from_version}${version} (${commit}) ==="
if [ -z "$staging_dir" ] || [ ! -d "$staging_dir" ]; then
log "ERROR: Staging directory not found: ${staging_dir}"
write_result "false" "Staging directory not found" "$version" "0"
rm -f "$TRIGGER_FILE"
exit 1
fi
local compose_dir
compose_dir=$(find_compose_dir "$api_dir")
# Step 1: Backup current version
log "Step 1: Backing up current version (${from_version})"
backup_current "$api_dir" "$from_version"
# Step 2: Copy new files
log "Step 2: Copying new files"
copy_new_files "$staging_dir" "$api_dir"
# Write commit hash to VERSION file
echo "$commit" > "${api_dir}/VERSION"
# Step 3: Rebuild container
log "Step 3: Building new container image"
cd "$compose_dir"
if ! DASHCADDY_COMMIT="$commit" docker compose build --quiet 2>&1; then
# Build failed: the old image is still current, so restoring the source
# files is enough — no container restart is needed here.
log "ERROR: docker compose build failed, rolling back"
restore_backup "$api_dir" "$from_version"
local elapsed=$(( $(date +%s) - start_time ))
write_result "false" "Build failed, rolled back to ${from_version}" "$version" "$((elapsed * 1000))"
rm -f "$TRIGGER_FILE"
exit 1
fi
# Step 4: Restart container
log "Step 4: Restarting container"
if ! docker compose up -d 2>&1; then
# Start failed: restore sources AND rebuild/restart on the old version.
log "ERROR: docker compose up failed, rolling back"
restore_backup "$api_dir" "$from_version"
docker compose build --quiet 2>&1 || true
docker compose up -d 2>&1 || true
local elapsed=$(( $(date +%s) - start_time ))
write_result "false" "Container start failed, rolled back to ${from_version}" "$version" "$((elapsed * 1000))"
rm -f "$TRIGGER_FILE"
exit 1
fi
# Step 5: Health check — the new container must answer /health or we revert
log "Step 5: Waiting for health check (${HEALTH_TIMEOUT}s timeout)"
if wait_for_health; then
local elapsed=$(( $(date +%s) - start_time ))
log "=== Update to ${version} SUCCESSFUL (${elapsed}s) ==="
write_result "true" "Update successful" "$version" "$((elapsed * 1000))"
else
log "Health check failed — ROLLING BACK to ${from_version}"
restore_backup "$api_dir" "$from_version"
cd "$compose_dir"
docker compose build --quiet 2>&1 || true
docker compose up -d 2>&1 || true
# Verify the rollback itself came up healthy before reporting.
if wait_for_health; then
local elapsed=$(( $(date +%s) - start_time ))
log "Rollback to ${from_version} succeeded"
write_result "false" "Health check failed after update. Rolled back to ${from_version}." "$version" "$((elapsed * 1000))"
else
local elapsed=$(( $(date +%s) - start_time ))
log "CRITICAL: Rollback also failed. Manual intervention required."
write_result "false" "CRITICAL: Both update and rollback failed. Manual intervention required." "$version" "$((elapsed * 1000))"
fi
fi
# Cleanup: remove the trigger (prevents re-runs) and the staging dir,
# then prune old backups.
rm -f "$TRIGGER_FILE"
rm -rf "${UPDATES_DIR}/staging"
cleanup_old_backups
log "Update process complete"
}
main "$@"

View File

@@ -0,0 +1,10 @@
[Unit]
Description=Watch for DashCaddy update trigger
Documentation=https://dashcaddy.net

[Path]
# Activates the unit with the same name and a .service suffix
# (dashcaddy-updater.service — the systemd default for path units)
# whenever trigger.json is written or changed.
PathChanged=/opt/dashcaddy/updates/trigger.json
# Create the watched directory tree if it does not exist yet
MakeDirectory=yes

[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,13 @@
[Unit]
Description=DashCaddy auto-update handler
Documentation=https://dashcaddy.net
# The update script runs `docker compose build/up`, so the Docker daemon
# must be up first.
After=docker.service
Requires=docker.service

[Service]
# oneshot: run the update script to completion on each path-unit trigger
Type=oneshot
ExecStart=/opt/dashcaddy/scripts/dashcaddy-update.sh
# Allow up to 5 minutes for build + restart + health check
TimeoutStartSec=300
StandardOutput=journal
StandardError=journal
SyslogIdentifier=dashcaddy-update

View File

@@ -0,0 +1,136 @@
#!/usr/bin/env node
/**
* DashCaddy Release Webhook Handler
* Receives push webhooks from Gitea, verifies HMAC signature,
* and triggers build-release.sh.
*
* Usage: node webhook-handler.js
* Env vars:
* WEBHOOK_SECRET — Gitea webhook secret (required)
* WEBHOOK_PORT — Listen port (default: 9090)
* BUILD_SCRIPT — Path to build script (default: /opt/dashcaddy-release/build-release.sh)
*/
const http = require('http');
const crypto = require('crypto');
const { spawn } = require('child_process');
const fs = require('fs');
// Port, shared webhook secret, and build-script path are environment-driven
// so the handler can be deployed under systemd without code edits.
const PORT = parseInt(process.env.WEBHOOK_PORT || '9090', 10);
const SECRET = process.env.WEBHOOK_SECRET;
const BUILD_SCRIPT = process.env.BUILD_SCRIPT || '/opt/dashcaddy-release/build-release.sh';
const LOG_FILE = '/var/log/dashcaddy-release.log';
// Fail fast: without the secret, signature verification is impossible and
// every webhook would be rejected anyway.
if (!SECRET) {
console.error('WEBHOOK_SECRET environment variable is required');
process.exit(1);
}
// Simple mutex: one build at a time; pushes arriving during a build are
// dropped (not queued) by triggerBuild().
let buildRunning = false;
/**
 * Log a timestamped line to stdout and append it to LOG_FILE.
 * File logging is best-effort: an unwritable log file (missing /var/log
 * permissions, disk full) previously made appendFileSync throw inside
 * request handlers and crash the process; stdout (journald) still gets
 * every line, so the file copy may be silently skipped.
 * @param {string} msg - message to log
 */
function log(msg) {
  const line = `[webhook] ${new Date().toISOString()} ${msg}`;
  console.log(line);
  try {
    fs.appendFileSync(LOG_FILE, line + '\n');
  } catch (_) {
    // stdout already has the message; losing the file copy is acceptable
  }
}
/**
 * Constant-time verification of the Gitea HMAC-SHA256 signature header
 * against the HMAC of the raw request body.
 *
 * crypto.timingSafeEqual THROWS when the two buffers differ in length, so
 * the original code let an attacker crash the process by sending any
 * signature whose length differed from a SHA-256 hex digest. We compare
 * decoded digest bytes and length-check first.
 *
 * @param {string} body - raw request body
 * @param {string} signature - hex digest from the X-Gitea-Signature header
 * @returns {boolean} true only if the signature matches
 */
function verifySignature(body, signature) {
  if (!signature) return false;
  const expected = crypto.createHmac('sha256', SECRET).update(body).digest();
  // Buffer.from(..., 'hex') ignores trailing invalid input, but any
  // malformed/short signature then fails the length check below.
  const given = Buffer.from(signature, 'hex');
  if (given.length !== expected.length) return false;
  return crypto.timingSafeEqual(given, expected);
}
/**
 * Run BUILD_SCRIPT once, streaming its stdout/stderr lines into the log.
 * Uses the module-level `buildRunning` flag as a mutex: invocations while
 * a build is in progress are dropped (not queued) — a later push must
 * arrive after the build finishes to start a new one.
 */
function triggerBuild() {
if (buildRunning) {
log('Build already in progress, skipping');
return;
}
buildRunning = true;
log('Triggering build...');
const child = spawn('bash', [BUILD_SCRIPT], {
stdio: ['ignore', 'pipe', 'pipe'],
env: { ...process.env, PATH: process.env.PATH },
});
child.stdout.on('data', (data) => {
const lines = data.toString().trim().split('\n');
lines.forEach(line => log(`[build] ${line}`));
});
child.stderr.on('data', (data) => {
const lines = data.toString().trim().split('\n');
lines.forEach(line => log(`[build:err] ${line}`));
});
// 'close' fires after the process exits AND both pipes are drained, so
// the mutex is released only once all build output has been logged.
child.on('close', (code) => {
buildRunning = false;
if (code === 0) {
log('Build completed successfully');
} else {
log(`Build FAILED with exit code ${code}`);
}
});
}
// Cap request bodies: real Gitea push payloads are small, and without a
// limit this unauthenticated endpoint would buffer arbitrarily large
// bodies into memory before signature verification (trivial DoS).
const MAX_BODY_BYTES = 1024 * 1024; // 1 MiB

const server = http.createServer((req, res) => {
  // Unauthenticated health probe for monitoring.
  if (req.method === 'GET' && req.url === '/health') {
    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ status: 'ok', buildRunning }));
    return;
  }
  // Only accept POST to /webhook
  if (req.method !== 'POST' || req.url !== '/webhook') {
    res.writeHead(404);
    res.end('Not found');
    return;
  }
  let body = '';
  let aborted = false;
  req.on('data', (chunk) => {
    body += chunk;
    if (!aborted && body.length > MAX_BODY_BYTES) {
      aborted = true;
      log('Request body too large, dropping connection');
      res.writeHead(413);
      res.end('Payload too large');
      req.destroy();
    }
  });
  req.on('end', () => {
    if (aborted) return;
    // Verify the Gitea HMAC signature before trusting anything in the body.
    const sig = req.headers['x-gitea-signature'] || '';
    if (!verifySignature(body, sig)) {
      log('Signature verification FAILED');
      res.writeHead(403);
      res.end('Invalid signature');
      return;
    }
    try {
      const payload = JSON.parse(body);
      const ref = payload.ref || '';
      const branch = ref.replace('refs/heads/', '');
      // Only pushes to main produce releases.
      if (branch !== 'main') {
        log(`Ignoring push to ${branch} (not main)`);
        res.writeHead(200);
        res.end('Ignored (not main branch)');
        return;
      }
      const pusher = payload.pusher?.login || 'unknown';
      const commits = payload.commits?.length || 0;
      log(`Push to main by ${pusher}: ${commits} commit(s)`);
      triggerBuild();
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ accepted: true }));
    } catch (e) {
      log('Failed to parse webhook payload: ' + e.message);
      res.writeHead(400);
      res.end('Invalid payload');
    }
  });
});
server.listen(PORT, '0.0.0.0', () => {
  // Log line must match the actual bind address: we listen on ALL
  // interfaces (the old message claimed 127.0.0.1, which was misleading
  // when auditing exposure of this unauthenticated-port service).
  log(`Webhook handler listening on 0.0.0.0:${PORT}`);
});

View File

@@ -0,0 +1,515 @@
/**
* DashCaddy Self-Updater
* Polls for new versions, downloads and stages updates,
* triggers host-side updater for API container rebuilds.
*
* Frontend files are updated directly (zero-downtime).
* API files require a container rebuild via the host-side systemd service.
*/
const EventEmitter = require('events');
const https = require('https');
const http = require('http');
const fs = require('fs');
const fsp = require('fs').promises;
const path = require('path');
const crypto = require('crypto');
const { execSync } = require('child_process');
const zlib = require('zlib');
const isWindows = process.platform === 'win32';
// Built-in configuration; every field can be overridden per-instance via
// the SelfUpdater constructor options.
const DEFAULTS = {
CHECK_INTERVAL: 30 * 60 * 1000, // 30 minutes
UPDATE_URL: 'https://get.dashcaddy.net/release',
MIRROR_URL: 'https://get2.dashcaddy.net/release', // fallback release server
UPDATES_DIR: isWindows ? 'C:/caddy/updates' : '/app/updates',
// API_SOURCE_DIR is the HOST path — written to trigger.json for the host-side updater
API_SOURCE_DIR: isWindows ? 'C:/caddy/sites/dashcaddy-api' : '/etc/dashcaddy/sites/dashcaddy-api',
// FRONTEND_DIR is the container path — dashboard is volume-mounted at /app/dashboard
FRONTEND_DIR: isWindows ? 'C:/caddy/sites/status' : '/app/dashboard',
MAX_BACKUPS: 3,
HEALTH_TIMEOUT: 60000, // ms
DOWNLOAD_TIMEOUT: 120000, // ms — per-download socket inactivity timeout
};
class SelfUpdater extends EventEmitter {
/**
 * @param {object} [options] - per-instance overrides; unset fields fall
 *   back to DEFAULTS (enabled, checkInterval, updateUrl, mirrorUrl,
 *   updatesDir, apiSourceDir, frontendDir, maxBackups).
 */
constructor(options = {}) {
super();
this.config = {
enabled: options.enabled !== false,
checkInterval: parseInt(options.checkInterval || DEFAULTS.CHECK_INTERVAL, 10),
updateUrl: options.updateUrl || DEFAULTS.UPDATE_URL,
mirrorUrl: options.mirrorUrl || DEFAULTS.MIRROR_URL,
updatesDir: options.updatesDir || DEFAULTS.UPDATES_DIR,
apiSourceDir: options.apiSourceDir || DEFAULTS.API_SOURCE_DIR,
frontendDir: options.frontendDir || DEFAULTS.FRONTEND_DIR,
maxBackups: parseInt(options.maxBackups || DEFAULTS.MAX_BACKUPS, 10),
};
this.status = 'idle'; // idle | checking | downloading | applying | waiting
this.checkTimer = null;
this.lastCheckTime = null;
this.lastCheckResult = null;
// Ensure directories exist
// NOTE(review): _ensureDirs is async and not awaited here — the first
// check fires 15s later, which in practice gives it time to finish.
this._ensureDirs();
}
// ── Lifecycle ──
/** Begin periodic update checks (no-op when disabled or already started). */
start() {
if (!this.config.enabled || this.checkTimer) return;
console.log('[SelfUpdater] Starting auto-update checks every %ds', this.config.checkInterval / 1000);
// First check after a short delay (let server finish startup)
setTimeout(() => {
this._autoCheckAndApply();
this.checkTimer = setInterval(() => this._autoCheckAndApply(), this.config.checkInterval);
}, 15000);
}
/**
 * Stop periodic checks.
 * NOTE(review): does not cancel the initial 15s setTimeout, so a stop()
 * within 15s of start() still allows one check to run — confirm intended.
 */
stop() {
if (this.checkTimer) {
clearInterval(this.checkTimer);
this.checkTimer = null;
}
}
// ── Version Info ──
/**
 * Read the installed version from package.json (next to this file) and
 * the build commit from the VERSION file written at image build time.
 * @returns {{version: string, commit: string|null}} '0.0.0' on failure
 */
getLocalVersion() {
try {
const pkg = JSON.parse(fs.readFileSync(path.join(__dirname, 'package.json'), 'utf8'));
let commit = null;
try {
commit = fs.readFileSync(path.join(__dirname, 'VERSION'), 'utf8').trim();
} catch (_) {}
return { version: pkg.version, commit };
} catch (e) {
return { version: '0.0.0', commit: null };
}
}
/** @returns {string} one of: idle | checking | downloading | applying | waiting */
getStatus() {
return this.status;
}
// ── Check for Updates ──
/**
 * Fetch remote version.json (primary server, then mirror) and compare with
 * the local version. Caches the outcome in lastCheckTime/lastCheckResult
 * and emits 'update-available' when a newer release is found.
 * NOTE(review): when version info comes from the mirror, applyUpdate()
 * still downloads the tarball from the PRIMARY updateUrl — if the primary
 * is down, the subsequent apply will fail; confirm this is intended.
 * @returns {Promise<{available: boolean, local?: object, remote?: object, error?: string}>}
 */
async checkForUpdate() {
this.status = 'checking';
try {
let remote;
try {
remote = await this._fetchJson(`${this.config.updateUrl}/version.json`);
} catch (primaryErr) {
console.warn('[SelfUpdater] Primary server failed:', primaryErr.message, '— trying mirror');
try {
remote = await this._fetchJson(`${this.config.mirrorUrl}/version.json`);
} catch (mirrorErr) {
this.status = 'idle';
this.lastCheckTime = Date.now();
this.lastCheckResult = { available: false, error: 'Update servers unreachable' };
return this.lastCheckResult;
}
}
const local = this.getLocalVersion();
const available = this._isNewer(local, remote);
this.lastCheckTime = Date.now();
this.lastCheckResult = { available, local, remote };
this.status = 'idle';
if (available) {
this.emit('update-available', remote);
}
return this.lastCheckResult;
} catch (e) {
this.status = 'idle';
this.lastCheckTime = Date.now();
this.lastCheckResult = { available: false, error: e.message };
return this.lastCheckResult;
}
}
// ── Apply Update ──
/**
 * Download, verify (SHA-256), extract and apply an update:
 *   - frontend files are copied in place (zero-downtime);
 *   - API files are handed to the host-side systemd updater via
 *     trigger.json (Linux only), which rebuilds/restarts this container.
 * Emits 'update-progress' at each step; records an entry in the history.
 * NOTE(review): remoteInfo.tarball from version.json is joined into local
 * paths — it is trusted because it comes from our own release server over
 * HTTPS; verify that assumption holds for all deployments.
 * @param {object} remoteInfo - remote version.json contents
 * @returns {Promise<object>} summary of what was updated
 * @throws when download/verify/extract fails (recorded as 'failed')
 */
async applyUpdate(remoteInfo) {
if (this.status !== 'idle' && this.status !== 'checking') {
throw new Error(`Update already in progress (status: ${this.status})`);
}
const local = this.getLocalVersion();
const stagingDir = path.join(this.config.updatesDir, 'staging');
try {
// 1. Download
this.status = 'downloading';
this.emit('update-progress', { step: 'downloading', version: remoteInfo.version });
const tarballUrl = `${this.config.updateUrl}/${remoteInfo.tarball}`;
const tarballPath = path.join(this.config.updatesDir, remoteInfo.tarball);
await this._downloadFile(tarballUrl, tarballPath);
// 2. Verify SHA-256
const hash = await this._computeSha256(tarballPath);
if (hash !== remoteInfo.sha256) {
await fsp.unlink(tarballPath).catch(() => {});
throw new Error(`SHA-256 mismatch: expected ${remoteInfo.sha256}, got ${hash}`);
}
// 3. Extract
this.status = 'applying';
this.emit('update-progress', { step: 'extracting', version: remoteInfo.version });
await this._cleanDir(stagingDir);
await this._extractTarball(tarballPath, stagingDir);
// 4. Apply frontend files directly (zero-downtime)
const frontendSrc = this._findDir(stagingDir, 'status');
if (frontendSrc) {
await this._copyDir(frontendSrc, this.config.frontendDir, [
'dist', 'css', 'assets', 'vendor', 'index.html', 'sw.js'
]);
this.emit('update-progress', { step: 'frontend-updated', version: remoteInfo.version });
}
// 5. Trigger API rebuild (Linux only — host-side systemd service)
const apiSrc = this._findDir(stagingDir, 'dashcaddy-api');
if (apiSrc && !isWindows) {
this.status = 'waiting';
this.emit('update-progress', { step: 'triggering-rebuild', version: remoteInfo.version });
const trigger = {
action: 'update',
version: remoteInfo.version,
commit: remoteInfo.commit,
fromVersion: local.version,
stagingDir: apiSrc,
apiSourceDir: this.config.apiSourceDir,
timestamp: new Date().toISOString(),
};
await fsp.writeFile(
path.join(this.config.updatesDir, 'trigger.json'),
JSON.stringify(trigger, null, 2)
);
// The host-side systemd service will handle the rest.
// After container restart, checkPostUpdateResult() reads the result.
this._addToHistory({
version: remoteInfo.version,
fromVersion: local.version,
timestamp: new Date().toISOString(),
status: 'pending',
frontendUpdated: !!frontendSrc,
apiUpdated: true,
});
} else if (isWindows) {
// Windows: frontend updated, API needs manual restart
this._addToHistory({
version: remoteInfo.version,
fromVersion: local.version,
timestamp: new Date().toISOString(),
status: 'partial',
frontendUpdated: !!frontendSrc,
apiUpdated: false,
note: 'API update requires manual container restart on Windows',
});
this.status = 'idle';
}
// Clean up tarball
await fsp.unlink(tarballPath).catch(() => {});
return {
success: true,
fromVersion: local.version,
toVersion: remoteInfo.version,
frontendUpdated: !!frontendSrc,
apiUpdated: !isWindows && !!apiSrc,
};
} catch (e) {
this.status = 'idle';
this._addToHistory({
version: remoteInfo.version,
fromVersion: local.version,
timestamp: new Date().toISOString(),
status: 'failed',
error: e.message,
});
throw e;
}
}
// ── Post-Update Result ──
/**
 * After a container restart, consume result.json written by the host-side
 * updater: resolve the 'pending' history entry and reset status to idle.
 * @returns {Promise<object|null>} the parsed result, or null if none exists
 */
async checkPostUpdateResult() {
const resultPath = path.join(this.config.updatesDir, 'result.json');
try {
const data = await fsp.readFile(resultPath, 'utf8');
const result = JSON.parse(data);
// Delete the result file so we don't process it again
await fsp.unlink(resultPath).catch(() => {});
// Update history
const history = this.getUpdateHistory();
const pending = history.find(h => h.status === 'pending');
if (pending) {
pending.status = result.success ? 'success' : 'rolled-back';
pending.duration = result.duration;
if (result.error) pending.error = result.error;
this._saveHistory(history);
}
this.status = 'idle';
return result;
} catch (_) {
return null;
}
}
// ── Rollback ──
/**
 * Request a rollback by pointing the host-side updater at a backed-up
 * version's directory (same trigger.json mechanism as an update).
 * @param {string} version - backup directory name under updates/backups
 * @throws when on Windows or when no backup exists for that version
 */
async rollbackToVersion(version) {
if (isWindows) throw new Error('Auto-rollback not supported on Windows');
const backupDir = path.join(this.config.updatesDir, 'backups', version);
try {
await fsp.access(backupDir);
} catch (_) {
throw new Error(`No backup found for version ${version}`);
}
const local = this.getLocalVersion();
const trigger = {
action: 'rollback',
version: version,
fromVersion: local.version,
stagingDir: backupDir,
apiSourceDir: this.config.apiSourceDir,
timestamp: new Date().toISOString(),
};
this.status = 'waiting';
await fsp.writeFile(
path.join(this.config.updatesDir, 'trigger.json'),
JSON.stringify(trigger, null, 2)
);
this._addToHistory({
version: version,
fromVersion: local.version,
timestamp: new Date().toISOString(),
status: 'pending',
rollback: true,
});
}
/**
 * List backup directory names, newest first by string sort.
 * NOTE(review): plain lexical sort — version 1.10.0 would order before
 * 1.9.0; acceptable while only a few backups are kept.
 * @returns {string[]}
 */
getAvailableRollbacks() {
const backupsDir = path.join(this.config.updatesDir, 'backups');
try {
return fs.readdirSync(backupsDir)
.filter(d => fs.statSync(path.join(backupsDir, d)).isDirectory())
.sort()
.reverse();
} catch (_) {
return [];
}
}
// ── History ──
/** @returns {object[]} persisted update history (newest first), [] if none */
getUpdateHistory() {
const historyPath = path.join(this.config.updatesDir, 'self-update-history.json');
try {
return JSON.parse(fs.readFileSync(historyPath, 'utf8'));
} catch (_) {
return [];
}
}
// ── Private Methods ──
/** Timer callback: check and, if an update exists, apply it immediately. */
async _autoCheckAndApply() {
try {
const result = await this.checkForUpdate();
if (result.available && result.remote) {
console.log('[SelfUpdater] Update available: %s → %s', result.local.version, result.remote.version);
await this.applyUpdate(result.remote);
}
} catch (e) {
console.error('[SelfUpdater] Auto-update error:', e.message);
}
}
/**
 * True if remote is a strictly newer semver, or same version but a
 * different build commit.
 * @param {{version: string, commit: string|null}} local
 * @param {{version: string, commit?: string}} remote
 * @returns {boolean}
 */
_isNewer(local, remote) {
if (!remote || !remote.version) return false;
// Compare semver: split into [major, minor, patch]
const lv = (local.version || '0.0.0').split('.').map(Number);
const rv = remote.version.split('.').map(Number);
for (let i = 0; i < 3; i++) {
if ((rv[i] || 0) > (lv[i] || 0)) return true;
if ((rv[i] || 0) < (lv[i] || 0)) return false;
}
// Same version — check commit hash
if (remote.commit && local.commit && remote.commit !== local.commit) return true;
return false;
}
/** Prepend an entry to the persisted history, capped at 50 entries. */
_addToHistory(entry) {
const history = this.getUpdateHistory();
history.unshift(entry);
// Keep last 50 entries
if (history.length > 50) history.length = 50;
this._saveHistory(history);
}
/** Persist the history array to disk (best-effort, errors logged). */
_saveHistory(history) {
const historyPath = path.join(this.config.updatesDir, 'self-update-history.json');
try {
fs.writeFileSync(historyPath, JSON.stringify(history, null, 2));
} catch (e) {
console.error('[SelfUpdater] Failed to save history:', e.message);
}
}
/** Create updates/, updates/staging and updates/backups (best-effort). */
async _ensureDirs() {
for (const dir of [this.config.updatesDir, path.join(this.config.updatesDir, 'staging'), path.join(this.config.updatesDir, 'backups')]) {
await fsp.mkdir(dir, { recursive: true }).catch(() => {});
}
}
/**
 * GET a URL and parse the body as JSON. 15s socket timeout; rejects on
 * non-200 status or invalid JSON. NOTE(review): does not follow redirects.
 * @param {string} url
 * @returns {Promise<object>}
 */
async _fetchJson(url) {
return new Promise((resolve, reject) => {
const mod = url.startsWith('https') ? https : http;
const req = mod.get(url, { timeout: 15000 }, (res) => {
if (res.statusCode !== 200) {
res.resume();
return reject(new Error(`HTTP ${res.statusCode} from ${url}`));
}
let data = '';
res.on('data', chunk => data += chunk);
res.on('end', () => {
try {
resolve(JSON.parse(data));
} catch (e) {
reject(new Error('Invalid JSON from ' + url));
}
});
});
req.on('error', reject);
req.on('timeout', () => { req.destroy(); reject(new Error('Timeout fetching ' + url)); });
});
}
/**
 * Stream a URL to a local file. Rejects on non-200, error, or timeout;
 * deletes the partial file on failure.
 * NOTE(review): no redirect handling — assumes the release server serves
 * the tarball directly.
 * @param {string} url
 * @param {string} dest - destination file path
 */
async _downloadFile(url, dest) {
return new Promise((resolve, reject) => {
const mod = url.startsWith('https') ? https : http;
const file = fs.createWriteStream(dest);
const req = mod.get(url, { timeout: DEFAULTS.DOWNLOAD_TIMEOUT }, (res) => {
if (res.statusCode !== 200) {
file.close();
fs.unlinkSync(dest);
return reject(new Error(`HTTP ${res.statusCode} downloading ${url}`));
}
res.pipe(file);
file.on('finish', () => { file.close(resolve); });
});
req.on('error', (e) => {
file.close();
fs.unlink(dest, () => {});
reject(e);
});
req.on('timeout', () => { req.destroy(); reject(new Error('Download timeout')); });
});
}
/**
 * @param {string} filePath
 * @returns {Promise<string>} lowercase hex SHA-256 of the file contents
 */
async _computeSha256(filePath) {
return new Promise((resolve, reject) => {
const hash = crypto.createHash('sha256');
const stream = fs.createReadStream(filePath);
stream.on('data', chunk => hash.update(chunk));
stream.on('end', () => resolve(hash.digest('hex')));
stream.on('error', reject);
});
}
/**
 * Extract a .tar.gz into destDir, dropping the top-level directory
 * (--strip-components=1). Shells out to the system `tar`.
 * @param {string} tarballPath
 * @param {string} destDir
 */
async _extractTarball(tarballPath, destDir) {
await fsp.mkdir(destDir, { recursive: true });
// Use tar command (available on Linux, and Git Bash on Windows)
try {
execSync(`tar xzf "${tarballPath}" -C "${destDir}" --strip-components=1`, { stdio: 'pipe' });
} catch (e) {
throw new Error('Failed to extract tarball: ' + e.message);
}
}
/**
 * Find a directory named `name` directly under baseDir or one level down
 * (handles tarballs that keep a wrapper directory).
 * @returns {string|null} absolute path, or null if not found
 */
_findDir(baseDir, name) {
const direct = path.join(baseDir, name);
if (fs.existsSync(direct)) return direct;
// Also check one level deeper (e.g., dashcaddy/dashcaddy-api)
try {
for (const entry of fs.readdirSync(baseDir)) {
const sub = path.join(baseDir, entry, name);
if (fs.existsSync(sub)) return sub;
}
} catch (_) {}
return null;
}
/**
 * Copy a whitelist of files/directories from src to dest; entries missing
 * from src are silently skipped.
 * @param {string} src
 * @param {string} dest
 * @param {string[]} items - names relative to src
 */
async _copyDir(src, dest, items) {
await fsp.mkdir(dest, { recursive: true });
for (const item of items) {
const srcPath = path.join(src, item);
const destPath = path.join(dest, item);
try {
const stat = await fsp.stat(srcPath);
if (stat.isDirectory()) {
await this._copyDirRecursive(srcPath, destPath);
} else {
await fsp.copyFile(srcPath, destPath);
}
} catch (_) {
// Item may not exist in the update — skip
}
}
}
/** Recursively copy a directory tree (files overwrite existing ones). */
async _copyDirRecursive(src, dest) {
await fsp.mkdir(dest, { recursive: true });
const entries = await fsp.readdir(src, { withFileTypes: true });
for (const entry of entries) {
const srcPath = path.join(src, entry.name);
const destPath = path.join(dest, entry.name);
if (entry.isDirectory()) {
await this._copyDirRecursive(srcPath, destPath);
} else {
await fsp.copyFile(srcPath, destPath);
}
}
}
/** Delete and recreate a directory, leaving it empty. */
async _cleanDir(dir) {
try {
await fsp.rm(dir, { recursive: true, force: true });
} catch (_) {}
await fsp.mkdir(dir, { recursive: true });
}
}
// Module-level singleton: configuration is driven entirely by environment
// variables; undefined vars fall through to the class defaults.
const env = process.env;
const selfUpdater = new SelfUpdater({
  enabled: env.DASHCADDY_UPDATE_ENABLED !== 'false', // opt-out flag
  checkInterval: env.DASHCADDY_UPDATE_INTERVAL,
  updateUrl: env.DASHCADDY_UPDATE_URL,
  mirrorUrl: env.DASHCADDY_MIRROR_URL,
  updatesDir: env.DASHCADDY_UPDATES_DIR,
  apiSourceDir: env.DASHCADDY_API_SOURCE_DIR,
  frontendDir: env.DASHCADDY_FRONTEND_DIR,
});
module.exports = selfUpdater;

View File

@@ -49,6 +49,7 @@ const resourceMonitor = require('./resource-monitor');
const backupManager = require('./backup-manager'); const backupManager = require('./backup-manager');
const healthChecker = require('./health-checker'); const healthChecker = require('./health-checker');
const updateManager = require('./update-manager'); const updateManager = require('./update-manager');
const selfUpdater = require('./self-updater');
const StateManager = require('./state-manager'); const StateManager = require('./state-manager');
const auditLogger = require('./audit-logger'); const auditLogger = require('./audit-logger');
const portLockManager = require('./port-lock-manager'); const portLockManager = require('./port-lock-manager');
@@ -1160,7 +1161,7 @@ Object.assign(ctx, {
app, siteConfig, servicesStateManager, configStateManager, app, siteConfig, servicesStateManager, configStateManager,
credentialManager, authManager, licenseManager, credentialManager, authManager, licenseManager,
healthChecker, updateManager, backupManager, resourceMonitor, healthChecker, updateManager, backupManager, resourceMonitor,
auditLogger, portLockManager, auditLogger, portLockManager, selfUpdater,
APP_TEMPLATES, TEMPLATE_CATEGORIES, DIFFICULTY_LEVELS, RECIPE_TEMPLATES, RECIPE_CATEGORIES, APP_TEMPLATES, TEMPLATE_CATEGORIES, DIFFICULTY_LEVELS, RECIPE_TEMPLATES, RECIPE_CATEGORIES,
asyncHandler, errorResponse, ok, fetchT, log, logError, safeErrorMessage, asyncHandler, errorResponse, ok, fetchT, log, logError, safeErrorMessage,
buildDomain, buildServiceUrl, getServiceById, readConfig, saveConfig, addServiceToConfig, buildDomain, buildServiceUrl, getServiceById, readConfig, saveConfig, addServiceToConfig,
@@ -1864,6 +1865,26 @@ const server = app.listen(PORT, '0.0.0.0', () => {
log.error('server', 'Update manager failed to start', { error: error.message }); log.error('server', 'Update manager failed to start', { error: error.message });
} }
try {
selfUpdater.start();
log.info('server', 'Self-updater started', { interval: selfUpdater.config.checkInterval, url: selfUpdater.config.updateUrl });
// Check for post-update result (did a previous update succeed or roll back?)
selfUpdater.checkPostUpdateResult().then(result => {
if (result) {
log.info('server', 'Post-update result', result);
if (typeof ctx.notification?.send === 'function') {
ctx.notification.send('system.update',
result.success ? 'DashCaddy Updated' : 'DashCaddy Update Failed',
result.success ? `Updated to v${result.version}` : `Update failed: ${result.error || 'Unknown'}. Rolled back.`,
result.success ? 'info' : 'error'
);
}
}
}).catch(() => {});
} catch (error) {
log.error('server', 'Self-updater failed to start', { error: error.message });
}
// Tailscale API sync (if OAuth configured) // Tailscale API sync (if OAuth configured)
if (tailscaleConfig.oauthConfigured) { if (tailscaleConfig.oauthConfigured) {
startTailscaleSyncTimer(); startTailscaleSyncTimer();
@@ -1881,6 +1902,7 @@ function shutdown(signal) {
backupManager.stop(); backupManager.stop();
healthChecker.stop(); healthChecker.stop();
updateManager.stop(); updateManager.stop();
selfUpdater.stop();
stopTailscaleSyncTimer(); stopTailscaleSyncTimer();
server.close(() => { server.close(() => {
log.info('shutdown', 'HTTP server closed'); log.info('shutdown', 'HTTP server closed');

View File

@@ -17,7 +17,7 @@
set -euo pipefail set -euo pipefail
# ---- Constants ------------------------------------------------------------- # ---- Constants -------------------------------------------------------------
readonly DASHCADDY_VERSION="1.0.0" readonly DASHCADDY_VERSION="1.1.0"
readonly DASHCADDY_DOWNLOAD="https://get.dashcaddy.net/release/latest.tar.gz" readonly DASHCADDY_DOWNLOAD="https://get.dashcaddy.net/release/latest.tar.gz"
readonly DASHCADDY_REPO="" # Set to a git URL to clone instead of downloading readonly DASHCADDY_REPO="" # Set to a git URL to clone instead of downloading
readonly INSTALL_DIR="/etc/dashcaddy" readonly INSTALL_DIR="/etc/dashcaddy"
@@ -388,6 +388,7 @@ EOF
create_directories() { create_directories() {
mkdir -p "$INSTALL_DIR" "$DOCKER_DATA" "$SITES_DIR" "$API_DIR" "$DASHBOARD_DIR" "${DASHBOARD_DIR}/assets" mkdir -p "$INSTALL_DIR" "$DOCKER_DATA" "$SITES_DIR" "$API_DIR" "$DASHBOARD_DIR" "${DASHBOARD_DIR}/assets"
mkdir -p /opt/dashcaddy/updates /opt/dashcaddy/scripts
ok "Directories created" ok "Directories created"
} }
@@ -444,6 +445,19 @@ fetch_source() {
done done
ok "Dashboard files deployed" ok "Dashboard files deployed"
# Deploy updater scripts
local scripts_src=""
for try in "${api_src}/scripts" "${tmp_src}/scripts"; do
[[ -d "$try" ]] && scripts_src="$try" && break
done
if [[ -n "$scripts_src" ]]; then
cp -f "${scripts_src}/dashcaddy-update.sh" /opt/dashcaddy/scripts/ 2>/dev/null || true
cp -f "${scripts_src}/dashcaddy-updater.path" /opt/dashcaddy/scripts/ 2>/dev/null || true
cp -f "${scripts_src}/dashcaddy-updater.service" /opt/dashcaddy/scripts/ 2>/dev/null || true
chmod +x /opt/dashcaddy/scripts/dashcaddy-update.sh 2>/dev/null || true
ok "Updater scripts deployed"
fi
# Cleanup # Cleanup
[[ -z "$SOURCE_PATH" ]] && rm -rf "$tmp_src" [[ -z "$SOURCE_PATH" ]] && rm -rf "$tmp_src"
} }
@@ -630,6 +644,8 @@ services:
- ${INSTALL_DIR}/totp-config.json:/app/totp-config.json:rw - ${INSTALL_DIR}/totp-config.json:/app/totp-config.json:rw
- ${INSTALL_DIR}/notifications.json:/app/notifications.json:rw - ${INSTALL_DIR}/notifications.json:/app/notifications.json:rw
- ${DASHBOARD_DIR}/assets:/app/assets:rw - ${DASHBOARD_DIR}/assets:/app/assets:rw
- ${DASHBOARD_DIR}:/app/dashboard:rw
- /opt/dashcaddy/updates:/app/updates:rw
- /var/run/docker.sock:/var/run/docker.sock - /var/run/docker.sock:/var/run/docker.sock
environment: environment:
- CADDYFILE_PATH=/caddyfile - CADDYFILE_PATH=/caddyfile
@@ -641,6 +657,12 @@ services:
- DNS_CREDENTIALS_FILE=/app/dns-credentials.json - DNS_CREDENTIALS_FILE=/app/dns-credentials.json
- HOST_LAN_IP=${LAN_IP} - HOST_LAN_IP=${LAN_IP}
- NODE_ENV=production - NODE_ENV=production
- DASHCADDY_UPDATE_ENABLED=true
- DASHCADDY_UPDATE_URL=https://get.dashcaddy.net/release
- DASHCADDY_MIRROR_URL=https://get2.dashcaddy.net/release
- DASHCADDY_UPDATES_DIR=/app/updates
- DASHCADDY_API_SOURCE_DIR=${API_DIR}
- DASHCADDY_FRONTEND_DIR=/app/dashboard
extra_hosts: extra_hosts:
- "host.docker.internal:host-gateway" - "host.docker.internal:host-gateway"
restart: unless-stopped restart: unless-stopped
@@ -654,6 +676,57 @@ DCEOF
ok "docker-compose.yml generated" ok "docker-compose.yml generated"
} }
# ============================================================================
# Auto-Updater Service
# ============================================================================
install_updater_service() {
local scripts_dir="/opt/dashcaddy/scripts"
if [[ ! -f "${scripts_dir}/dashcaddy-update.sh" ]]; then
warn "Updater script not found — skipping auto-update service"
return
fi
# Install systemd units
if [[ -f "${scripts_dir}/dashcaddy-updater.path" ]]; then
cp -f "${scripts_dir}/dashcaddy-updater.path" /etc/systemd/system/
else
cat > /etc/systemd/system/dashcaddy-updater.path <<'PATHEOF'
[Unit]
Description=Watch for DashCaddy update trigger
[Path]
PathChanged=/opt/dashcaddy/updates/trigger.json
MakeDirectory=yes
[Install]
WantedBy=multi-user.target
PATHEOF
fi
if [[ -f "${scripts_dir}/dashcaddy-updater.service" ]]; then
cp -f "${scripts_dir}/dashcaddy-updater.service" /etc/systemd/system/
else
cat > /etc/systemd/system/dashcaddy-updater.service <<'SVCEOF'
[Unit]
Description=DashCaddy auto-update handler
After=docker.service
Requires=docker.service
[Service]
Type=oneshot
ExecStart=/opt/dashcaddy/scripts/dashcaddy-update.sh
TimeoutStartSec=300
StandardOutput=journal
StandardError=journal
SyslogIdentifier=dashcaddy-update
SVCEOF
fi
systemctl daemon-reload
systemctl enable dashcaddy-updater.path >/dev/null 2>&1
systemctl start dashcaddy-updater.path >/dev/null 2>&1
ok "Auto-updater service installed"
}
# ============================================================================ # ============================================================================
# Build & Launch # Build & Launch
# ============================================================================ # ============================================================================
@@ -759,6 +832,13 @@ do_uninstall() {
docker rm -f "$CONTAINER_NAME" 2>/dev/null && ok "Container removed" || true docker rm -f "$CONTAINER_NAME" 2>/dev/null && ok "Container removed" || true
# Stop and remove updater service
systemctl stop dashcaddy-updater.path 2>/dev/null || true
systemctl disable dashcaddy-updater.path 2>/dev/null || true
rm -f /etc/systemd/system/dashcaddy-updater.path /etc/systemd/system/dashcaddy-updater.service
systemctl daemon-reload 2>/dev/null || true
rm -rf /opt/dashcaddy && ok "Updater service removed" || true
if [[ -L /etc/caddy/Caddyfile ]] && readlink /etc/caddy/Caddyfile | grep -q dashcaddy; then if [[ -L /etc/caddy/Caddyfile ]] && readlink /etc/caddy/Caddyfile | grep -q dashcaddy; then
rm -f /etc/caddy/Caddyfile rm -f /etc/caddy/Caddyfile
[[ -f /etc/caddy/Caddyfile.original ]] && mv /etc/caddy/Caddyfile.original /etc/caddy/Caddyfile [[ -f /etc/caddy/Caddyfile.original ]] && mv /etc/caddy/Caddyfile.original /etc/caddy/Caddyfile
@@ -949,6 +1029,7 @@ main() {
# ---- Step 6: Build & start ---- # ---- Step 6: Build & start ----
step "Building & starting services" step "Building & starting services"
build_and_start build_and_start
install_updater_service
# ---- Step 7: Start Caddy ---- # ---- Step 7: Start Caddy ----
step "Starting web server" step "Starting web server"

View File

@@ -12,6 +12,7 @@
<button class="panel-tab active" data-panel="updates-available">Available</button> <button class="panel-tab active" data-panel="updates-available">Available</button>
<button class="panel-tab" data-panel="updates-history">History</button> <button class="panel-tab" data-panel="updates-history">History</button>
<button class="panel-tab" data-panel="updates-auto">Auto-Update</button> <button class="panel-tab" data-panel="updates-auto">Auto-Update</button>
<button class="panel-tab" data-panel="updates-dashcaddy" id="updates-dashcaddy-tab">DashCaddy</button>
</div> </div>
<!-- Tab: Available Updates --> <!-- Tab: Available Updates -->
@@ -38,6 +39,32 @@
</div> </div>
</div> </div>
<!-- Tab: DashCaddy Self-Update -->
<div id="updates-dashcaddy" class="panel-section">
<div id="dashcaddy-version-info" style="display: flex; align-items: center; gap: 16px; margin-bottom: 16px; padding: 12px; border-radius: 8px; background: var(--bg);">
<div style="flex: 1;">
<div style="font-weight: 600; font-size: 1rem;">DashCaddy</div>
<div id="dashcaddy-current-version" style="color: var(--muted); font-size: 0.85rem;">Loading...</div>
</div>
<div id="dashcaddy-update-badge" style="display: none; padding: 4px 12px; border-radius: 12px; font-size: 0.78rem; font-weight: 600; background: var(--accent); color: var(--bg);">Update available</div>
</div>
<div id="dashcaddy-update-details" style="display: none; margin-bottom: 16px; padding: 12px; border-radius: 8px; border: 1px solid var(--border);">
<div style="display: flex; justify-content: space-between; align-items: center; margin-bottom: 8px;">
<span style="font-weight: 600;">New version: <span id="dashcaddy-new-version"></span></span>
<button id="dashcaddy-apply-btn" class="btn-accent-solid" style="padding: 6px 16px; font-size: 0.85rem;">Update Now</button>
</div>
<div id="dashcaddy-changelog" style="font-size: 0.8rem; color: var(--muted); max-height: 120px; overflow-y: auto; white-space: pre-wrap; font-family: var(--font-mono); line-height: 1.5;"></div>
</div>
<div id="dashcaddy-status-bar" style="display: none; margin-bottom: 16px; padding: 10px 12px; border-radius: 8px; font-size: 0.85rem;"></div>
<div style="margin-bottom: 12px;">
<button id="dashcaddy-check-btn" style="padding: 6px 14px; font-size: 0.82rem;">Check for Updates</button>
<button id="dashcaddy-rollback-btn" style="padding: 6px 14px; font-size: 0.82rem; margin-left: 6px;">Rollback</button>
</div>
<div id="dashcaddy-history-container" style="max-height: 250px; overflow-y: auto;">
<div class="panel-empty"><span class="empty-icon">📦</span>No self-update history.</div>
</div>
</div>
<div class="panel-bottom-bar"> <div class="panel-bottom-bar">
<span id="updates-last-check" class="text-auto-right"></span> <span id="updates-last-check" class="text-auto-right"></span>
</div> </div>
@@ -261,6 +288,187 @@
} }
} }
// ===== DASHCADDY SELF-UPDATE =====
const dcVersionInfo = document.getElementById('dashcaddy-current-version');
const dcUpdateBadge = document.getElementById('dashcaddy-update-badge');
const dcUpdateDetails = document.getElementById('dashcaddy-update-details');
const dcNewVersion = document.getElementById('dashcaddy-new-version');
const dcChangelog = document.getElementById('dashcaddy-changelog');
const dcApplyBtn = document.getElementById('dashcaddy-apply-btn');
const dcCheckBtn = document.getElementById('dashcaddy-check-btn');
const dcRollbackBtn = document.getElementById('dashcaddy-rollback-btn');
const dcStatusBar = document.getElementById('dashcaddy-status-bar');
const dcHistoryContainer = document.getElementById('dashcaddy-history-container');
let dcLastCheck = null;
function dcShowStatus(msg, type) {
if (!dcStatusBar) return;
dcStatusBar.style.display = 'block';
dcStatusBar.style.background = type === 'error' ? 'var(--bad-bg)' : type === 'success' ? 'var(--ok-bg)' : 'var(--bg)';
dcStatusBar.style.color = type === 'error' ? 'var(--bad-fg)' : type === 'success' ? 'var(--ok-fg)' : 'var(--fg)';
dcStatusBar.textContent = msg;
}
async function dcLoadVersion() {
try {
const res = await fetch('/api/v1/system/version');
const data = await res.json();
if (data.success) {
dcVersionInfo.textContent = 'v' + data.version + (data.commit ? ' (' + data.commit.substring(0, 7) + ')' : '');
}
} catch (_) {
dcVersionInfo.textContent = 'Unable to fetch version';
}
}
async function dcCheckForUpdate(silent) {
if (!silent) {
dcCheckBtn.textContent = 'Checking...';
dcCheckBtn.disabled = true;
}
try {
const res = await fetch('/api/v1/system/update-check');
const data = await res.json();
dcLastCheck = data;
if (data.success && data.available && data.remote) {
dcUpdateBadge.style.display = '';
dcUpdateDetails.style.display = '';
dcNewVersion.textContent = 'v' + data.remote.version;
dcChangelog.textContent = data.remote.changelog || 'No changelog available.';
// Add notification dot to the Updates button
const updatesBtn = document.getElementById('updates-btn');
if (updatesBtn && !updatesBtn.querySelector('.update-dot')) {
const dot = document.createElement('span');
dot.className = 'update-dot';
dot.style.cssText = 'position:absolute;top:2px;right:2px;width:8px;height:8px;border-radius:50%;background:var(--accent);';
updatesBtn.style.position = 'relative';
updatesBtn.appendChild(dot);
}
// Also add dot to the DashCaddy tab
const dcTab = document.getElementById('updates-dashcaddy-tab');
if (dcTab && !dcTab.querySelector('.update-dot')) {
const dot = document.createElement('span');
dot.className = 'update-dot';
dot.style.cssText = 'display:inline-block;width:6px;height:6px;border-radius:50%;background:var(--accent);margin-left:4px;vertical-align:middle;';
dcTab.appendChild(dot);
}
} else {
dcUpdateBadge.style.display = 'none';
dcUpdateDetails.style.display = 'none';
if (!silent) dcShowStatus('You are running the latest version.', 'success');
}
if (!silent) {
dcCheckBtn.textContent = 'Check for Updates';
dcCheckBtn.disabled = false;
}
} catch (e) {
if (!silent) {
dcShowStatus('Failed to check: ' + e.message, 'error');
dcCheckBtn.textContent = 'Check for Updates';
dcCheckBtn.disabled = false;
}
}
}
async function dcApplyUpdate() {
if (!confirm('Apply DashCaddy update? The API container will restart.')) return;
dcApplyBtn.textContent = 'Updating...';
dcApplyBtn.disabled = true;
dcShowStatus('Downloading and applying update...', 'info');
try {
const res = await secureFetch('/api/v1/system/update-apply', { method: 'POST' });
const data = await res.json();
if (data.success) {
dcShowStatus('Update initiated: v' + (data.fromVersion || '?') + ' → v' + (data.toVersion || '?') + '. The container will restart shortly.', 'success');
dcApplyBtn.textContent = 'Applied!';
// Remove notification dots
document.querySelectorAll('.update-dot').forEach(d => d.remove());
} else {
throw new Error(data.error || 'Update failed');
}
} catch (e) {
dcShowStatus('Update failed: ' + e.message, 'error');
dcApplyBtn.textContent = 'Update Now';
dcApplyBtn.disabled = false;
}
}
async function dcLoadHistory() {
try {
const res = await fetch('/api/v1/system/update-history');
const data = await res.json();
const history = data.success && data.history ? data.history : [];
if (history.length === 0) {
dcHistoryContainer.innerHTML = '<div class="panel-empty"><span class="empty-icon">📦</span>No self-update history.</div>';
return;
}
let html = '<table style="width: 100%; border-collapse: collapse; font-size: 0.82rem;">';
html += '<tr style="border-bottom: 1px solid var(--border); color: var(--muted);"><th style="padding: 6px; text-align: left;">When</th><th style="padding: 6px; text-align: left;">Version</th><th style="padding: 6px; text-align: left;">From</th><th style="padding: 6px; text-align: left;">Status</th></tr>';
for (const h of history) {
const st = h.status === 'success' ? '✓ success' : h.status === 'pending' ? '⏳ pending' : h.status === 'partial' ? '⚠ partial' : '✗ ' + h.status;
const stColor = h.status === 'success' ? 'var(--ok-fg)' : h.status === 'pending' ? 'var(--muted)' : 'var(--bad-fg)';
html += '<tr style="border-bottom: 1px solid var(--border);">';
html += '<td style="padding: 6px; color: var(--muted);">' + timeAgo(h.timestamp) + '</td>';
html += '<td style="padding: 6px; font-weight: 500;">v' + escapeHtml(h.version) + (h.rollback ? ' (rollback)' : '') + '</td>';
html += '<td style="padding: 6px; color: var(--muted);">v' + escapeHtml(h.fromVersion || '?') + '</td>';
html += '<td style="padding: 6px;"><span style="color: ' + stColor + ';">' + st + '</span></td>';
html += '</tr>';
if (h.error) {
html += '<tr><td colspan="4" style="padding: 4px 6px 8px; font-size: 0.78rem; color: var(--bad-fg);">' + escapeHtml(h.error) + '</td></tr>';
}
if (h.note) {
html += '<tr><td colspan="4" style="padding: 4px 6px 8px; font-size: 0.78rem; color: var(--muted);">' + escapeHtml(h.note) + '</td></tr>';
}
}
html += '</table>';
dcHistoryContainer.innerHTML = html;
} catch (e) {
dcHistoryContainer.innerHTML = '<div class="panel-empty" style="color: var(--bad-fg);">Failed: ' + escapeHtml(e.message) + '</div>';
}
}
async function dcShowRollback() {
try {
const res = await fetch('/api/v1/system/rollback-versions');
const data = await res.json();
const versions = data.success && data.versions ? data.versions : [];
if (versions.length === 0) {
showNotification('No rollback versions available.', 'info');
return;
}
const version = prompt('Available rollback versions:\n' + versions.join('\n') + '\n\nEnter version to rollback to:');
if (!version) return;
if (!versions.includes(version)) {
showNotification('Invalid version: ' + version, 'error');
return;
}
if (!confirm('Rollback DashCaddy to v' + version + '? The container will restart.')) return;
dcShowStatus('Rolling back to v' + version + '...', 'info');
const r = await secureFetch('/api/v1/system/rollback', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ version })
});
const d = await r.json();
if (d.success) {
dcShowStatus('Rollback to v' + version + ' initiated. Container will restart.', 'success');
} else {
throw new Error(d.error || 'Rollback failed');
}
} catch (e) {
dcShowStatus('Rollback failed: ' + e.message, 'error');
}
}
dcCheckBtn?.addEventListener('click', () => dcCheckForUpdate(false));
dcApplyBtn?.addEventListener('click', dcApplyUpdate);
dcRollbackBtn?.addEventListener('click', dcShowRollback);
checkBtn?.addEventListener('click', checkForUpdates); checkBtn?.addEventListener('click', checkForUpdates);
openBtn?.addEventListener('click', () => { openBtn?.addEventListener('click', () => {
modal?.classList.add('show'); modal?.classList.add('show');
@@ -271,4 +479,12 @@
// Lazy-load tabs // Lazy-load tabs
document.querySelector('[data-panel="updates-history"]')?.addEventListener('click', loadHistory); document.querySelector('[data-panel="updates-history"]')?.addEventListener('click', loadHistory);
document.querySelector('[data-panel="updates-auto"]')?.addEventListener('click', loadAutoConfig); document.querySelector('[data-panel="updates-auto"]')?.addEventListener('click', loadAutoConfig);
document.querySelector('[data-panel="updates-dashcaddy"]')?.addEventListener('click', () => {
dcLoadVersion();
dcLoadHistory();
if (!dcLastCheck) dcCheckForUpdate(true);
});
// Non-blocking check on page load — just adds notification dot if update available
setTimeout(() => dcCheckForUpdate(true), 5000);
})(); })();