feat: add host-side auto-updater for zero-touch API container rebuilds
When the in-container self-updater downloads a new version, it writes trigger.json. The new systemd path unit watches for this file and runs dashcaddy-update.sh, which backs up the current API files, copies the new ones, rebuilds the container, verifies health, and writes result.json. Rollback is automatic on build or health-check failure. Also fixes the undefined `isWindows` variable in self-updater.js and adds the DASHCADDY_HOST_UPDATES_DIR env var to the installer's docker-compose template for correct container-to-host path translation.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
208
dashcaddy-api/scripts/dashcaddy-update.sh
Normal file
208
dashcaddy-api/scripts/dashcaddy-update.sh
Normal file
@@ -0,0 +1,208 @@
|
||||
#!/usr/bin/env bash
#
# DashCaddy Host-Side Updater
#
# Triggered by a systemd path unit when the container writes trigger.json.
# Reads the trigger, backs up the current API, deploys the new files,
# rebuilds the container, and writes result.json so the new container
# knows the outcome.
#
# This runs on the HOST, outside the container.

set -euo pipefail

# Fixed locations and tunables for the update flow.
readonly UPDATES_DIR="/opt/dashcaddy/updates"
readonly TRIGGER_FILE="${UPDATES_DIR}/trigger.json"
readonly RESULT_FILE="${UPDATES_DIR}/result.json"
readonly BACKUPS_DIR="${UPDATES_DIR}/backups"
# NOTE(review): CONTAINER_NAME is not referenced anywhere in this script —
# confirm whether it is still needed or can be dropped.
readonly CONTAINER_NAME="dashcaddy-api"
readonly MAX_BACKUPS=3
readonly HEALTH_TIMEOUT=60
|
||||
|
||||
# Timestamped logger; every line this script emits carries the same prefix
# so it is easy to grep in the journal.
log() {
  printf '[dashcaddy-update] %s %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$*"
}
|
||||
|
||||
# Write result.json describing the update outcome so the (new) container
# can report back to the user.
#
# Arguments:
#   $1 - "true" or "false"
#   $2 - target version string
#   $3 - duration in seconds
#   $4 - optional error message (only emitted when $1 != "true")
#
# JSON construction is delegated to python3 (already a hard dependency of
# this script) so the error message is properly escaped — the previous
# heredoc interpolation produced invalid JSON whenever the error text
# contained quotes or backslashes.
write_result() {
  local success="$1" version="$2" duration="$3"
  local error="${4:-}"

  python3 - "$success" "$version" "$duration" "$error" > "$RESULT_FILE" <<'PYEOF'
import json
import sys
import time

success, version, duration, error = sys.argv[1:5]
result = {
    "success": success == "true",
    "version": version,
    "duration": int(duration),
}
if success != "true":
    result["error"] = error
result["timestamp"] = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
print(json.dumps(result, indent=2))
PYEOF
}
|
||||
|
||||
# Keep at most MAX_BACKUPS backup directories under BACKUPS_DIR, deleting
# the oldest extras (by mtime).
#
# Uses mapfile + an explicit loop rather than `cut | xargs rm -rf`:
#   * xargs splits on whitespace, so a path containing a space would be
#     deleted incorrectly (or delete the wrong thing);
#   * `head` closing the pipe early can SIGPIPE `sort`, and under
#     `set -euo pipefail` that would abort the whole script.
cleanup_old_backups() {
  local count
  count=$(find "$BACKUPS_DIR" -maxdepth 1 -mindepth 1 -type d 2>/dev/null | wc -l)
  if (( count > MAX_BACKUPS )); then
    log "Cleaning old backups (${count} > ${MAX_BACKUPS})"
    # Oldest first: sort numerically on the epoch mtime prefix, then strip it.
    local -a oldest_first
    mapfile -t oldest_first < <(
      find "$BACKUPS_DIR" -maxdepth 1 -mindepth 1 -type d -printf '%T@ %p\n' \
        | sort -n | cut -d' ' -f2-
    )
    local excess=$(( count - MAX_BACKUPS ))
    local i
    for (( i = 0; i < excess; i++ )); do
      rm -rf -- "${oldest_first[i]}"
    done
  fi
}
|
||||
|
||||
# Poll the API /health endpoint until it answers or HEALTH_TIMEOUT expires.
#
# Arguments:
#   $1 - port to probe (default: 3001)
# Returns:
#   0 once the endpoint responds, 1 on timeout.
wait_for_health() {
  local port="${1:-3001}"
  local deadline="$HEALTH_TIMEOUT"
  local waited=0

  log "Waiting for health check (timeout: ${deadline}s)..."
  until (( waited >= deadline )); do
    # -f: treat HTTP errors as failure; short per-request timeout so one
    # hung request cannot eat the whole budget.
    if curl -fsSL --max-time 3 "http://localhost:${port}/health" &>/dev/null; then
      log "Health check passed after ${waited}s"
      return 0
    fi
    sleep 2
    waited=$(( waited + 2 ))
  done

  log "Health check FAILED after ${deadline}s"
  return 1
}
|
||||
|
||||
# --- helpers for main --------------------------------------------------------

# Read one top-level key out of a trigger JSON file.
# Passes path/key via argv instead of interpolating them into python code.
# Arguments: $1 - JSON file path, $2 - key name.  Outputs the value on stdout.
read_trigger_field() {
  local file="$1" key="$2"
  python3 -c "import json, sys; print(json.load(open(sys.argv[1]))[sys.argv[2]])" "$file" "$key"
}

# Copy the deployable API artifacts (top-level JS files, package manifests,
# Dockerfile, openapi.yaml, and routes/) from $1 into $2.  Missing files are
# skipped silently — not every release ships every artifact.
copy_api_files() {
  local src="$1" dst="$2"
  local item
  for item in "$src"/*.js "$src"/package.json "$src"/package-lock.json "$src"/Dockerfile "$src"/openapi.yaml; do
    [[ -f "$item" ]] && cp -f "$item" "$dst/" 2>/dev/null || true
  done
  # Copy the *contents* of routes/ ("/." suffix): `cp -rf src/routes dst/routes/`
  # would nest routes/routes when the destination already exists.  Plain `if`
  # instead of `[[ ]] &&` so a missing dir does not trip `set -e`.
  if [[ -d "$src/routes" ]]; then
    mkdir -p "$dst/routes"
    cp -rf "$src/routes/." "$dst/routes/"
  fi
}

# Run a compose subcommand, preferring the `docker compose` plugin and
# falling back to the legacy standalone `docker-compose` binary.
compose() {
  docker compose "$@" 2>&1 || docker-compose "$@" 2>&1
}

# Record a failed update and exit.
# Arguments: $1 - version, $2 - start epoch seconds, $3 - error message.
abort_update() {
  local version="$1" start_time="$2" message="$3"
  write_result "false" "$version" "$(( $(date +%s) - start_time ))" "$message"
  rm -f "${TRIGGER_FILE}.processing"
  exit 1
}

# Orchestrate one update cycle: read trigger, back up, deploy, rebuild,
# restart, health-check, and roll back to the backup on any failure.
main() {
  local start_time
  start_time=$(date +%s)

  # 1. Read trigger — exit quietly when there is nothing to do (the path
  # unit may fire for unrelated changes).
  if [[ ! -f "$TRIGGER_FILE" ]]; then
    log "No trigger file found — nothing to do"
    exit 0
  fi

  # Parse trigger.json (uses python3, available on all supported distros).
  local action version from_version staging_dir api_source_dir
  action=$(read_trigger_field "$TRIGGER_FILE" "action")
  version=$(read_trigger_field "$TRIGGER_FILE" "version")
  from_version=$(read_trigger_field "$TRIGGER_FILE" "fromVersion")
  staging_dir=$(read_trigger_field "$TRIGGER_FILE" "stagingDir")
  api_source_dir=$(read_trigger_field "$TRIGGER_FILE" "apiSourceDir")

  log "=== ${action^^}: v${from_version} -> v${version} ==="
  log "Staging: ${staging_dir}"
  log "API source: ${api_source_dir}"

  # Consume the trigger immediately so we don't re-process on failure
  mv "$TRIGGER_FILE" "${TRIGGER_FILE}.processing"

  # 2. Validate staging directory
  if [[ ! -d "$staging_dir" ]]; then
    log "ERROR: Staging directory not found: ${staging_dir}"
    abort_update "$version" "$start_time" "Staging directory not found"
  fi

  # 3. Backup current API files (keyed by the version we are replacing)
  local backup_dir="${BACKUPS_DIR}/${from_version}"
  mkdir -p "$backup_dir"
  log "Backing up current API files to ${backup_dir}"
  copy_api_files "$api_source_dir" "$backup_dir"
  # Save version marker
  echo "$from_version" > "$backup_dir/VERSION"

  cleanup_old_backups

  # 4. Copy new files from staging to API source
  log "Deploying new API files..."
  copy_api_files "$staging_dir" "$api_source_dir"

  # 5. Rebuild container
  log "Rebuilding container..."
  cd "$api_source_dir"

  if ! compose build --quiet; then
    log "ERROR: Docker build failed — rolling back"
    copy_api_files "$backup_dir" "$api_source_dir"
    abort_update "$version" "$start_time" "Docker build failed"
  fi

  # 6. Restart container
  log "Restarting container..."
  if compose up -d; then
    log "Container restarted"
  else
    log "ERROR: Container restart failed — rolling back"
    copy_api_files "$backup_dir" "$api_source_dir"
    # Best-effort rebuild/restart of the restored version.
    compose build --quiet || true
    compose up -d || true
    abort_update "$version" "$start_time" "Container restart failed"
  fi

  # 7. Health check — roll back (best effort) if the new version never
  # becomes healthy.
  if wait_for_health; then
    local duration=$(( $(date +%s) - start_time ))
    log "=== Update successful: v${version} in ${duration}s ==="
    write_result "true" "$version" "$duration"
  else
    local duration=$(( $(date +%s) - start_time ))
    log "ERROR: Health check failed after update — rolling back"
    copy_api_files "$backup_dir" "$api_source_dir"
    compose build --quiet || true
    compose up -d || true
    wait_for_health || log "WARNING: Rollback health check also failed"
    write_result "false" "$version" "$duration" "Health check failed after update"
  fi

  # 8. Cleanup — remove the consumed trigger and the staging payload.
  rm -f "${TRIGGER_FILE}.processing"
  rm -rf "${UPDATES_DIR}/staging" 2>/dev/null || true

  log "=== Update process complete ==="
}

main "$@"
|
||||
9
dashcaddy-api/scripts/dashcaddy-updater.path
Normal file
9
dashcaddy-api/scripts/dashcaddy-updater.path
Normal file
@@ -0,0 +1,9 @@
|
||||
# dashcaddy-updater.path — systemd path unit.
# Activates the matching dashcaddy-updater.service (same unit name by
# default) whenever the in-container self-updater writes trigger.json.
[Unit]
Description=Watch for DashCaddy update trigger

[Path]
# PathChanged fires when the watched file is closed after being written,
# so a fully-written trigger.json starts exactly one update run.
PathChanged=/opt/dashcaddy/updates/trigger.json
# Create the watched directory before watching, in case the installer
# has not made it yet.
MakeDirectory=yes

[Install]
WantedBy=multi-user.target
|
||||
12
dashcaddy-api/scripts/dashcaddy-updater.service
Normal file
12
dashcaddy-api/scripts/dashcaddy-updater.service
Normal file
@@ -0,0 +1,12 @@
|
||||
# dashcaddy-updater.service — oneshot handler started by dashcaddy-updater.path.
# Runs the host-side update script that rebuilds the API container.
[Unit]
Description=DashCaddy auto-update handler
# The update script drives docker, so the daemon must be up first.
After=docker.service
Requires=docker.service

[Service]
# oneshot: run the script to completion for each trigger, no daemon.
Type=oneshot
ExecStart=/opt/dashcaddy/scripts/dashcaddy-update.sh
# Allow up to 5 minutes for backup + rebuild + health check (the script's
# own health timeout is 60s; the rest covers the docker build).
TimeoutStartSec=300
# Send all script output to the journal under a greppable identifier.
StandardOutput=journal
StandardError=journal
SyslogIdentifier=dashcaddy-update
|
||||
@@ -17,6 +17,7 @@ const crypto = require('crypto');
|
||||
const { execSync } = require('child_process');
|
||||
const zlib = require('zlib');
|
||||
const platformPaths = require('./platform-paths');
|
||||
const isWindows = platformPaths.isWindows;
|
||||
|
||||
const DEFAULTS = {
|
||||
CHECK_INTERVAL: 30 * 60 * 1000, // 30 minutes
|
||||
|
||||
@@ -661,6 +661,7 @@ services:
|
||||
- DASHCADDY_UPDATE_URL=https://get.dashcaddy.net/release
|
||||
- DASHCADDY_MIRROR_URL=https://get2.dashcaddy.net/release
|
||||
- DASHCADDY_UPDATES_DIR=/app/updates
|
||||
- DASHCADDY_HOST_UPDATES_DIR=/opt/dashcaddy/updates
|
||||
- DASHCADDY_API_SOURCE_DIR=${API_DIR}
|
||||
- DASHCADDY_FRONTEND_DIR=/app/dashboard
|
||||
extra_hosts:
|
||||
|
||||
Reference in New Issue
Block a user