Add auto-update system for DashCaddy instances

- self-updater.js: polls for new versions, downloads/verifies tarballs,
  triggers host-side rebuild via systemd path unit
- dashcaddy-update.sh + systemd units: host-side container rebuild with
  automatic rollback on health check failure
- 7 new /api/v1/system/* endpoints for version info, update check/apply,
  rollback, and update history
- Frontend: DashCaddy tab in Updates modal with version display,
  changelog, update button, rollback, and notification dot
- install.sh: updater service installation, volume mounts, env vars
- build-release.sh + webhook-handler.js: release server pipeline
  (Gitea webhook → build tarball → deploy to get.dashcaddy.net)
- Dockerfile: DASHCADDY_COMMIT build arg → VERSION file
- Version bump to 1.1.0

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-07 03:11:35 -08:00
parent 9a0abc02d1
commit ffa6966fd3
14 changed files with 1395 additions and 4 deletions

View File

@@ -0,0 +1,111 @@
#!/usr/bin/env bash
# DashCaddy Release Builder
# Triggered by Gitea webhook on push to main.
# Clones repo, builds tarball, writes version.json, deploys to web root.
set -euo pipefail

# Gitea origin to build from.
readonly REPO_URL="http://100.98.123.59:3000/sami7777/dashcaddy.git"
# Local web root served at get.dashcaddy.net.
readonly RELEASE_DIR="/var/www/get.dashcaddy.net/release"
# Per-invocation scratch dir ($$ = this shell's PID, unique per run).
readonly BUILD_DIR="/tmp/dashcaddy-build-$$"
readonly MIRROR_HOST="root@100.98.123.59" # Contabo DE
readonly BRANCH="main"

# Timestamped log line to stdout (the webhook handler captures and re-logs it).
log() { echo "[build-release] $(date '+%Y-%m-%d %H:%M:%S') $*"; }

# Remove the scratch dir on any exit path (success, failure, or signal).
cleanup() { rm -rf "$BUILD_DIR"; }
trap cleanup EXIT
main() {
  # Full release pipeline: clone → version/changelog → stage files →
  # tarball → checksum → manifest → deploy → mirror sync.
  log "=== Starting release build ==="

  # 1. Clone the latest commit on the release branch.
  mkdir -p "$BUILD_DIR"
  log "Cloning ${BRANCH}..."
  git clone --depth 1 --branch "$BRANCH" "$REPO_URL" "$BUILD_DIR/repo" 2>&1
  cd "$BUILD_DIR/repo"
  local commit
  commit=$(git rev-parse --short HEAD)
  log "Commit: ${commit}"

  # 2. Read the release version from the API package manifest.
  local version
  version=$(python3 -c "import json; print(json.load(open('dashcaddy-api/package.json'))['version'])")
  log "Version: ${version}"

  # 3. Build changelog (last 10 commits, one-liner each).
  local changelog
  changelog=$(git log --oneline -10 --no-decorate 2>/dev/null || echo "${commit} (no log)")

  # JSON-encode the changelog by passing it through the environment.
  # The previous approach interpolated it into a Python triple-quoted
  # literal, which broke (and was injectable) whenever a commit subject
  # contained quotes, backslashes, or ''' sequences.
  local changelog_json
  changelog_json=$(CHANGELOG="$changelog" python3 -c 'import json, os; print(json.dumps(os.environ["CHANGELOG"]))')

  # 4. Assemble tarball contents into a staging tree.
  local staging="$BUILD_DIR/dashcaddy"
  mkdir -p "$staging/dashcaddy-api/routes" "$staging/status" "$staging/scripts"
  # API files (optional ones may be missing; failures are tolerated).
  cp -f dashcaddy-api/*.js "$staging/dashcaddy-api/" 2>/dev/null || true
  cp -rf dashcaddy-api/routes/* "$staging/dashcaddy-api/routes/" 2>/dev/null || true
  cp -f dashcaddy-api/package.json "$staging/dashcaddy-api/"
  cp -f dashcaddy-api/package-lock.json "$staging/dashcaddy-api/" 2>/dev/null || true
  cp -f dashcaddy-api/Dockerfile "$staging/dashcaddy-api/"
  cp -f dashcaddy-api/openapi.yaml "$staging/dashcaddy-api/" 2>/dev/null || true
  # Dashboard files.
  cp -f status/index.html "$staging/status/"
  cp -f status/sw.js "$staging/status/" 2>/dev/null || true
  local dir
  for dir in css js dist vendor assets; do
    if [ -d "status/${dir}" ]; then
      cp -rf "status/${dir}" "$staging/status/"
    fi
  done
  # Updater scripts.
  cp -f dashcaddy-api/scripts/dashcaddy-update.sh "$staging/scripts/" 2>/dev/null || true
  cp -f dashcaddy-api/scripts/dashcaddy-updater.path "$staging/scripts/" 2>/dev/null || true
  cp -f dashcaddy-api/scripts/dashcaddy-updater.service "$staging/scripts/" 2>/dev/null || true

  # 5. Create the tarball.
  local tarball="dashcaddy-${version}.tar.gz"
  cd "$BUILD_DIR"
  tar czf "$tarball" dashcaddy/
  log "Tarball: ${tarball} ($(du -h "$tarball" | cut -f1))"

  # 6. Compute the SHA-256 checksum instances use to verify downloads.
  local sha256
  sha256=$(sha256sum "$tarball" | cut -d' ' -f1)
  log "SHA-256: ${sha256}"

  # 7. Write version.json, the update-check manifest.
  cat > version.json <<EOF
{
  "version": "${version}",
  "commit": "${commit}",
  "date": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
  "sha256": "${sha256}",
  "changelog": ${changelog_json},
  "breaking": false,
  "tarball": "${tarball}"
}
EOF

  # 8. Deploy to the local web root.
  mkdir -p "$RELEASE_DIR"
  cp -f "$tarball" "$RELEASE_DIR/"
  cp -f version.json "$RELEASE_DIR/"
  # Also keep a stable "latest" name for clients that do not read version.json.
  cp -f "$tarball" "$RELEASE_DIR/latest.tar.gz"
  log "Deployed to ${RELEASE_DIR}"

  # 9. Sync to the mirror (Contabo DE); any failure here is non-fatal.
  if ssh -o ConnectTimeout=5 "$MIRROR_HOST" true 2>/dev/null; then
    log "Syncing to mirror..."
    rsync -az --timeout=30 "$RELEASE_DIR/" "$MIRROR_HOST:/var/www/get2.dashcaddy.net/release/" 2>&1 || {
      log "WARNING: Mirror sync failed (non-fatal)"
    }
    log "Mirror synced"
  else
    log "WARNING: Mirror host unreachable, skipping sync"
  fi

  log "=== Release build complete: v${version} (${commit}) ==="
}
main "$@"

View File

@@ -0,0 +1,219 @@
#!/usr/bin/env bash
# DashCaddy Host-Side Updater
# Triggered by systemd path unit when the API container writes trigger.json.
# Handles API container rebuild + restart with automatic rollback on failure.
set -euo pipefail

# Shared state directory where the API drops trigger.json and reads result.json.
readonly UPDATES_DIR="/opt/dashcaddy/updates"
# Written by the API to request an update; removed once handled.
readonly TRIGGER_FILE="${UPDATES_DIR}/trigger.json"
# Written by this script with the outcome; read back by the API.
readonly RESULT_FILE="${UPDATES_DIR}/result.json"
# Per-version file backups used for rollback.
readonly BACKUP_BASE="${UPDATES_DIR}/backups"
readonly HEALTH_URL="http://localhost:3001/health"
# Total seconds to wait for the API to report healthy after a restart.
readonly HEALTH_TIMEOUT=60
# Keep only this many version backups on disk.
readonly MAX_BACKUPS=3

# Timestamped log line to stdout (journald captures it via the service unit).
log() { echo "[dashcaddy-update] $(date '+%Y-%m-%d %H:%M:%S') $*"; }
write_result() {
  # Persist the outcome of an update attempt as JSON for the API to read.
  #   $1 success ("true"/"false"), $2 human-readable message,
  #   $3 target version, $4 duration in milliseconds
  local success="$1" message="$2" version="$3" duration="$4"
  # JSON-encode the message via python3 so quotes/backslashes in it
  # (e.g. quoted paths in error text) cannot produce invalid JSON.
  local message_json
  message_json=$(MSG="$message" python3 -c 'import json, os; print(json.dumps(os.environ["MSG"]))')
  cat > "$RESULT_FILE" <<EOF
{
  "success": ${success},
  "version": "${version}",
  "message": ${message_json},
  "duration": ${duration},
  "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
}
EOF
  log "Result written: success=${success} version=${version}"
}
backup_current() {
  # Snapshot the currently deployed API files so a failed update can be
  # rolled back. Missing optional files are silently skipped.
  #   $1: API source dir, $2: version label for the backup dir
  local api_dir="$1" version="$2"
  local backup_dir="${BACKUP_BASE}/${version}"
  mkdir -p "$backup_dir/routes"
  cp -f "${api_dir}"/*.js "$backup_dir/" 2>/dev/null || true
  cp -rf "${api_dir}/routes/"* "$backup_dir/routes/" 2>/dev/null || true
  local name
  for name in package.json package-lock.json Dockerfile openapi.yaml; do
    cp -f "${api_dir}/${name}" "$backup_dir/" 2>/dev/null || true
  done
  log "Backed up version ${version} to ${backup_dir}"
}
restore_backup() {
  # Copy a previously saved version's files back into the API source dir.
  #   $1: API source dir, $2: version label to restore
  # Returns 1 (without touching anything) when no such backup exists.
  local api_dir="$1" version="$2"
  local backup_dir="${BACKUP_BASE}/${version}"
  if [ ! -d "$backup_dir" ]; then
    log "ERROR: No backup found for version ${version}"
    return 1
  fi
  cp -f "${backup_dir}"/*.js "$api_dir/" 2>/dev/null || true
  cp -rf "${backup_dir}/routes/"* "${api_dir}/routes/" 2>/dev/null || true
  local name
  for name in package.json package-lock.json Dockerfile openapi.yaml; do
    cp -f "${backup_dir}/${name}" "$api_dir/" 2>/dev/null || true
  done
  log "Restored version ${version} from ${backup_dir}"
}
copy_new_files() {
  # Install the staged (already verified/extracted) update files into the
  # live API source dir. Optional files are skipped without error.
  #   $1: staging dir from the extracted tarball, $2: API source dir
  local staging_dir="$1" api_dir="$2"
  cp -f "${staging_dir}"/*.js "$api_dir/" 2>/dev/null || true
  if [ -d "${staging_dir}/routes" ]; then
    cp -rf "${staging_dir}/routes/"* "${api_dir}/routes/" 2>/dev/null || true
  fi
  local name
  for name in package.json package-lock.json Dockerfile openapi.yaml; do
    cp -f "${staging_dir}/${name}" "$api_dir/" 2>/dev/null || true
  done
  log "Copied new files from ${staging_dir} to ${api_dir}"
}
wait_for_health() {
  # Poll HEALTH_URL every 2 seconds until it answers, for at most
  # HEALTH_TIMEOUT seconds total. Returns 0 on success, 1 on timeout.
  local max_attempts=$((HEALTH_TIMEOUT / 2))
  local i
  for (( i = 0; i < max_attempts; i++ )); do
    if curl -fsS --max-time 3 "$HEALTH_URL" >/dev/null 2>&1; then
      log "Health check passed (attempt $((i+1)))"
      return 0
    fi
    sleep 2
  done
  log "Health check FAILED after ${HEALTH_TIMEOUT}s"
  return 1
}
find_compose_dir() {
  # Locate the directory holding the docker-compose file for dashcaddy-api,
  # trying the known site dirs first.
  #   $1: fallback directory (the API source dir) when no site dir matches.
  # Outputs the chosen directory on stdout.
  local dir  # was a global leak: the loop variable escaped into the caller
  for dir in /etc/dashcaddy/sites/dashcaddy-api /etc/dashcaddy/sites/caddy-api; do
    if [ -f "${dir}/docker-compose.yml" ] || [ -f "${dir}/docker-compose.yaml" ]; then
      echo "$dir"
      return 0
    fi
  done
  # Fallback: assume the compose file lives alongside the API source.
  echo "$1"
}
cleanup_old_backups() {
  # Prune the oldest backup directories so at most MAX_BACKUPS remain.
  # Uses a glob (lexically sorted, same order `ls` produced) instead of
  # parsing `ls` output, which breaks on unusual directory names.
  if [ ! -d "$BACKUP_BASE" ]; then return; fi
  local dirs=()
  local d
  for d in "${BACKUP_BASE}"/*/; do
    if [ -d "$d" ]; then dirs+=("$d"); fi
  done
  local count=${#dirs[@]}
  if (( count > MAX_BACKUPS )); then
    local to_remove=$((count - MAX_BACKUPS))
    local i
    for (( i = 0; i < to_remove; i++ )); do
      rm -rf "${dirs[i]}"
      log "Cleaned old backup: ${dirs[i]}"
    done
  fi
}
main() {
  # Orchestrates one update (or rollback) cycle:
  #   parse trigger → backup → copy files → rebuild → restart → health check,
  # rolling back to the previous version if any late step fails. Always
  # leaves a result.json describing the outcome and removes the trigger.
  if [ ! -f "$TRIGGER_FILE" ]; then
    log "No trigger file found, exiting"
    exit 0
  fi
  local start_time
  start_time=$(date +%s)
  # Parse trigger file. Each field is read independently with python3 and
  # falls back to a default so one malformed field cannot abort the run.
  local action version from_version staging_dir api_dir commit
  action=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('action','update'))" 2>/dev/null || echo "update")
  version=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('version','unknown'))" 2>/dev/null || echo "unknown")
  from_version=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('fromVersion','unknown'))" 2>/dev/null || echo "unknown")
  staging_dir=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('stagingDir',''))" 2>/dev/null || echo "")
  api_dir=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('apiSourceDir','/opt/dashcaddy'))" 2>/dev/null || echo "/opt/dashcaddy")
  commit=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('commit','unknown'))" 2>/dev/null || echo "unknown")
  log "=== DashCaddy ${action} started: ${from_version}${version} (${commit}) ==="
  # The staging dir is prepared by the in-container updater; without it
  # there is nothing to install.
  if [ -z "$staging_dir" ] || [ ! -d "$staging_dir" ]; then
    log "ERROR: Staging directory not found: ${staging_dir}"
    write_result "false" "Staging directory not found" "$version" "0"
    rm -f "$TRIGGER_FILE"
    exit 1
  fi
  local compose_dir
  compose_dir=$(find_compose_dir "$api_dir")
  # Step 1: Backup current version
  log "Step 1: Backing up current version (${from_version})"
  backup_current "$api_dir" "$from_version"
  # Step 2: Copy new files
  log "Step 2: Copying new files"
  copy_new_files "$staging_dir" "$api_dir"
  # Write commit hash to VERSION file (surfaced by the API's version info).
  echo "$commit" > "${api_dir}/VERSION"
  # Step 3: Rebuild container
  log "Step 3: Building new container image"
  cd "$compose_dir"
  if ! DASHCADDY_COMMIT="$commit" docker compose build --quiet 2>&1; then
    # Build failure: restore files; old image is still running, no restart.
    log "ERROR: docker compose build failed, rolling back"
    restore_backup "$api_dir" "$from_version"
    local elapsed=$(( $(date +%s) - start_time ))
    write_result "false" "Build failed, rolled back to ${from_version}" "$version" "$((elapsed * 1000))"
    rm -f "$TRIGGER_FILE"
    exit 1
  fi
  # Step 4: Restart container
  log "Step 4: Restarting container"
  if ! docker compose up -d 2>&1; then
    # Start failure: restore files, then best-effort rebuild + restart of
    # the restored version (failures here are deliberately tolerated).
    log "ERROR: docker compose up failed, rolling back"
    restore_backup "$api_dir" "$from_version"
    docker compose build --quiet 2>&1 || true
    docker compose up -d 2>&1 || true
    local elapsed=$(( $(date +%s) - start_time ))
    write_result "false" "Container start failed, rolled back to ${from_version}" "$version" "$((elapsed * 1000))"
    rm -f "$TRIGGER_FILE"
    exit 1
  fi
  # Step 5: Health check
  log "Step 5: Waiting for health check (${HEALTH_TIMEOUT}s timeout)"
  if wait_for_health; then
    local elapsed=$(( $(date +%s) - start_time ))
    log "=== Update to ${version} SUCCESSFUL (${elapsed}s) ==="
    write_result "true" "Update successful" "$version" "$((elapsed * 1000))"
  else
    # Unhealthy after update: restore files, rebuild, restart, and verify
    # the rolled-back version itself comes up healthy.
    log "Health check failed — ROLLING BACK to ${from_version}"
    restore_backup "$api_dir" "$from_version"
    cd "$compose_dir"
    docker compose build --quiet 2>&1 || true
    docker compose up -d 2>&1 || true
    if wait_for_health; then
      local elapsed=$(( $(date +%s) - start_time ))
      log "Rollback to ${from_version} succeeded"
      write_result "false" "Health check failed after update. Rolled back to ${from_version}." "$version" "$((elapsed * 1000))"
    else
      local elapsed=$(( $(date +%s) - start_time ))
      log "CRITICAL: Rollback also failed. Manual intervention required."
      write_result "false" "CRITICAL: Both update and rollback failed. Manual intervention required." "$version" "$((elapsed * 1000))"
    fi
  fi
  # Cleanup: remove the trigger so the path unit does not refire, drop the
  # staging tree, and prune old backups.
  rm -f "$TRIGGER_FILE"
  rm -rf "${UPDATES_DIR}/staging"
  cleanup_old_backups
  log "Update process complete"
}
main "$@"

View File

@@ -0,0 +1,10 @@
[Unit]
Description=Watch for DashCaddy update trigger
Documentation=https://dashcaddy.net

[Path]
# When the trigger file is written/replaced, systemd activates the
# same-named service (dashcaddy-updater.service) to run the update.
PathChanged=/opt/dashcaddy/updates/trigger.json
# Create the watched directory if it does not exist yet (fresh installs).
MakeDirectory=yes

[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,13 @@
[Unit]
Description=DashCaddy auto-update handler
Documentation=https://dashcaddy.net
# The update script drives `docker compose`, so Docker must be up first.
After=docker.service
Requires=docker.service

[Service]
# oneshot: runs the script to completion per trigger; activated by the
# matching dashcaddy-updater.path unit, so no [Install] section is needed.
Type=oneshot
ExecStart=/opt/dashcaddy/scripts/dashcaddy-update.sh
# Allow up to 5 minutes for backup + build + restart + health check.
TimeoutStartSec=300
StandardOutput=journal
StandardError=journal
SyslogIdentifier=dashcaddy-update

View File

@@ -0,0 +1,136 @@
#!/usr/bin/env node
/**
 * DashCaddy Release Webhook Handler
 * Receives push webhooks from Gitea, verifies HMAC signature,
 * and triggers build-release.sh.
 *
 * Usage: node webhook-handler.js
 * Env vars:
 * WEBHOOK_SECRET — Gitea webhook secret (required)
 * WEBHOOK_PORT — Listen port (default: 9090)
 * BUILD_SCRIPT — Path to build script (default: /opt/dashcaddy-release/build-release.sh)
 */
const http = require('http');
const crypto = require('crypto');
const { spawn } = require('child_process');
const fs = require('fs');

const PORT = parseInt(process.env.WEBHOOK_PORT || '9090', 10);
const SECRET = process.env.WEBHOOK_SECRET;
const BUILD_SCRIPT = process.env.BUILD_SCRIPT || '/opt/dashcaddy-release/build-release.sh';
const LOG_FILE = '/var/log/dashcaddy-release.log';

// Refuse to start without a secret: without HMAC verification, anyone who
// can reach the port could trigger builds.
if (!SECRET) {
  console.error('WEBHOOK_SECRET environment variable is required');
  process.exit(1);
}

// Single-flight flag: only one build at a time; overlapping pushes are
// coalesced by skipping the trigger (see triggerBuild below).
let buildRunning = false;
function log(msg) {
const line = `[webhook] ${new Date().toISOString()} ${msg}`;
console.log(line);
fs.appendFileSync(LOG_FILE, line + '\n');
}
function verifySignature(body, signature) {
if (!signature) return false;
const hmac = crypto.createHmac('sha256', SECRET).update(body).digest('hex');
return crypto.timingSafeEqual(
Buffer.from(signature),
Buffer.from(hmac)
);
}
/**
 * Launch build-release.sh in the background, streaming its stdout/stderr
 * into the shared log. No-op when a build is already in progress, so
 * rapid consecutive pushes coalesce into the single running build.
 */
function triggerBuild() {
  if (buildRunning) {
    log('Build already in progress, skipping');
    return;
  }
  buildRunning = true;
  log('Triggering build...');
  const child = spawn('bash', [BUILD_SCRIPT], {
    stdio: ['ignore', 'pipe', 'pipe'],
    env: { ...process.env, PATH: process.env.PATH },
  });
  // Mirror build output line-by-line into our log with a [build] prefix.
  child.stdout.on('data', (data) => {
    const lines = data.toString().trim().split('\n');
    lines.forEach(line => log(`[build] ${line}`));
  });
  child.stderr.on('data', (data) => {
    const lines = data.toString().trim().split('\n');
    lines.forEach(line => log(`[build:err] ${line}`));
  });
  // Release the single-flight flag when the process exits, whatever the
  // outcome, so the next push can build again.
  child.on('close', (code) => {
    buildRunning = false;
    if (code === 0) {
      log('Build completed successfully');
    } else {
      log(`Build FAILED with exit code ${code}`);
    }
  });
}
// Cap request bodies: the handler buffers the whole body in memory before
// the signature is verified, so an unauthenticated client could otherwise
// exhaust memory with an arbitrarily large POST. Real Gitea push payloads
// are far below this limit.
const MAX_BODY_BYTES = 1024 * 1024;

const server = http.createServer((req, res) => {
  // Unauthenticated health probe for monitoring.
  if (req.method === 'GET' && req.url === '/health') {
    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ status: 'ok', buildRunning }));
    return;
  }
  // Only accept POST to /webhook; everything else is a 404.
  if (req.method !== 'POST' || req.url !== '/webhook') {
    res.writeHead(404);
    res.end('Not found');
    return;
  }
  let body = '';
  let rejected = false;
  req.on('data', (chunk) => {
    if (rejected) return;
    body += chunk;
    if (body.length > MAX_BODY_BYTES) {
      rejected = true;
      res.writeHead(413);
      res.end('Payload too large');
      req.destroy();
    }
  });
  req.on('end', () => {
    if (rejected) return;
    // Verify the Gitea HMAC signature before trusting any of the payload.
    const sig = req.headers['x-gitea-signature'] || '';
    if (!verifySignature(body, sig)) {
      log('Signature verification FAILED');
      res.writeHead(403);
      res.end('Invalid signature');
      return;
    }
    try {
      const payload = JSON.parse(body);
      const ref = payload.ref || '';
      const branch = ref.replace('refs/heads/', '');
      // Only pushes to main produce releases.
      if (branch !== 'main') {
        log(`Ignoring push to ${branch} (not main)`);
        res.writeHead(200);
        res.end('Ignored (not main branch)');
        return;
      }
      const pusher = payload.pusher?.login || 'unknown';
      const commits = payload.commits?.length || 0;
      log(`Push to main by ${pusher}: ${commits} commit(s)`);
      triggerBuild();
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ accepted: true }));
    } catch (e) {
      log('Failed to parse webhook payload: ' + e.message);
      res.writeHead(400);
      res.end('Invalid payload');
    }
  });
});

server.listen(PORT, '0.0.0.0', () => {
  // Report the actual bind address (previously misreported as 127.0.0.1
  // while binding all interfaces).
  log(`Webhook handler listening on 0.0.0.0:${PORT}`);
});