Add auto-update system for DashCaddy instances
- self-updater.js: polls for new versions, downloads/verifies tarballs, triggers host-side rebuild via systemd path unit - dashcaddy-update.sh + systemd units: host-side container rebuild with automatic rollback on health check failure - 7 new /api/v1/system/* endpoints for version info, update check/apply, rollback, and update history - Frontend: DashCaddy tab in Updates modal with version display, changelog, update button, rollback, and notification dot - install.sh: updater service installation, volume mounts, env vars - build-release.sh + webhook-handler.js: release server pipeline (Gitea webhook → build tarball → deploy to get.dashcaddy.net) - Dockerfile: DASHCADDY_COMMIT build arg → VERSION file - Version bump to 1.1.0 Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -12,6 +12,9 @@ COPY *.js ./
|
||||
COPY routes/ ./routes/
|
||||
COPY openapi.yaml ./
|
||||
|
||||
ARG DASHCADDY_COMMIT=unknown
|
||||
RUN echo "${DASHCADDY_COMMIT}" > VERSION
|
||||
|
||||
# Note: Running as root because container needs Docker socket access
|
||||
# (which is root-equivalent anyway). Socket access required for container management.
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
// ── App Identity ──────────────────────────────────────────────
|
||||
const APP = {
|
||||
NAME: 'DashCaddy',
|
||||
VERSION: '1.1',
|
||||
PORT: 3001,
|
||||
USER_AGENTS: {
|
||||
PROBE: 'DashCaddy-Probe/1.0',
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "dashcaddy-api",
|
||||
"version": "1.1.0",
|
||||
"description": "DashCaddy API server - Dashboard backend for Docker, Caddy & DNS management",
|
||||
"main": "server.js",
|
||||
"scripts": {
|
||||
|
||||
@@ -95,6 +95,7 @@ const ctx = {
|
||||
resourceMonitor: null,
|
||||
auditLogger: null,
|
||||
portLockManager: null,
|
||||
selfUpdater: null,
|
||||
|
||||
// Templates
|
||||
APP_TEMPLATES: null,
|
||||
|
||||
@@ -59,5 +59,69 @@ module.exports = function(ctx) {
|
||||
res.json({ success: true, message: 'Update scheduled', scheduledTime });
|
||||
}, 'updates-schedule'));
|
||||
|
||||
// ===== DASHCADDY SELF-UPDATE ENDPOINTS =====
// All handlers delegate to the SelfUpdater instance attached to ctx
// (ctx.selfUpdater) and are wrapped in ctx.asyncHandler for error handling.

// Get current version (package.json version + build commit from VERSION file)
router.get('/system/version', ctx.asyncHandler(async (req, res) => {
  const local = ctx.selfUpdater.getLocalVersion();
  res.json({ success: true, name: 'DashCaddy', version: local.version, commit: local.commit });
}, 'system-version'));

// Check for DashCaddy update (SelfUpdater tries primary server, then mirror)
router.get('/system/update-check', ctx.asyncHandler(async (req, res) => {
  const result = await ctx.selfUpdater.checkForUpdate();
  res.json({ success: true, ...result });
}, 'system-update-check'));

// Apply available update
router.post('/system/update-apply', ctx.asyncHandler(async (req, res) => {
  // Re-check rather than trusting a stale client-side result.
  const check = await ctx.selfUpdater.checkForUpdate();
  if (!check.available) {
    return res.json({ success: true, message: 'Already up to date' });
  }
  // Start async — container may restart
  // (fire-and-forget on purpose: a successful apply can restart this very
  // process, so the client must get its response before the update runs)
  ctx.selfUpdater.applyUpdate(check.remote).catch(err => {
    ctx.logError('self-update', err);
  });
  res.json({
    success: true,
    message: 'Update initiated',
    fromVersion: check.local.version,
    toVersion: check.remote.version,
  });
}, 'system-update-apply'));

// Get update status (current state machine value + last check metadata)
router.get('/system/update-status', ctx.asyncHandler(async (req, res) => {
  res.json({
    success: true,
    status: ctx.selfUpdater.getStatus(),
    lastCheck: ctx.selfUpdater.lastCheckTime,
    lastResult: ctx.selfUpdater.lastCheckResult,
  });
}, 'system-update-status'));

// Get self-update history
router.get('/system/update-history', ctx.asyncHandler(async (req, res) => {
  const history = ctx.selfUpdater.getUpdateHistory();
  res.json({ success: true, history });
}, 'system-update-history'));

// List rollback versions (backups retained by the host-side updater)
router.get('/system/rollback-versions', ctx.asyncHandler(async (req, res) => {
  const versions = ctx.selfUpdater.getAvailableRollbacks();
  res.json({ success: true, versions });
}, 'system-rollback-versions'));
|
||||
|
||||
// Rollback to a previous version.
// The version string ultimately becomes a backup directory name on the host
// (see dashcaddy-update.sh / SelfUpdater.rollbackToVersion), so it must be
// validated here to prevent path traversal (e.g. "../../etc").
router.post('/system/rollback', ctx.asyncHandler(async (req, res) => {
  const { version } = req.body;
  if (!version) return ctx.errorResponse(res, 400, 'version is required');
  // Accept only plain dotted/dashed version identifiers.
  if (typeof version !== 'string' || !/^[0-9A-Za-z][0-9A-Za-z._-]{0,63}$/.test(version)) {
    return ctx.errorResponse(res, 400, 'invalid version format');
  }
  // Fire-and-forget: the container may restart mid-rollback.
  ctx.selfUpdater.rollbackToVersion(version).catch(err => {
    ctx.logError('self-rollback', err);
  });
  res.json({ success: true, message: `Rollback to ${version} initiated` });
}, 'system-rollback'));
|
||||
|
||||
return router;
|
||||
};
|
||||
|
||||
111
dashcaddy-api/scripts/build-release.sh
Normal file
111
dashcaddy-api/scripts/build-release.sh
Normal file
@@ -0,0 +1,111 @@
|
||||
#!/usr/bin/env bash
# DashCaddy Release Builder
# Triggered by Gitea webhook on push to main.
# Clones repo, builds tarball, writes version.json, deploys to web root.

set -euo pipefail

readonly REPO_URL="http://100.98.123.59:3000/sami7777/dashcaddy.git"
readonly RELEASE_DIR="/var/www/get.dashcaddy.net/release"
readonly BUILD_DIR="/tmp/dashcaddy-build-$$"
readonly MIRROR_HOST="root@100.98.123.59" # Contabo DE
readonly BRANCH="main"

log() { echo "[build-release] $(date '+%Y-%m-%d %H:%M:%S') $*"; }

cleanup() { rm -rf "$BUILD_DIR"; }
trap cleanup EXIT

main() {
    log "=== Starting release build ==="

    # 1. Clone latest
    mkdir -p "$BUILD_DIR"
    log "Cloning ${BRANCH}..."
    git clone --depth 1 --branch "$BRANCH" "$REPO_URL" "$BUILD_DIR/repo" 2>&1
    cd "$BUILD_DIR/repo"

    local commit
    commit=$(git rev-parse --short HEAD)
    log "Commit: ${commit}"

    # 2. Read version from package.json
    local version
    version=$(python3 -c "import json; print(json.load(open('dashcaddy-api/package.json'))['version'])")
    log "Version: ${version}"

    # 3. Build changelog (last 10 commits, one-liner)
    local changelog
    changelog=$(git log --oneline -10 --no-decorate 2>/dev/null || echo "${commit} (no log)")

    # 4. Assemble tarball contents
    local staging="$BUILD_DIR/dashcaddy"
    mkdir -p "$staging/dashcaddy-api/routes" "$staging/status" "$staging/scripts"

    # API files
    cp -f dashcaddy-api/*.js "$staging/dashcaddy-api/" 2>/dev/null || true
    cp -rf dashcaddy-api/routes/* "$staging/dashcaddy-api/routes/" 2>/dev/null || true
    cp -f dashcaddy-api/package.json "$staging/dashcaddy-api/"
    cp -f dashcaddy-api/package-lock.json "$staging/dashcaddy-api/" 2>/dev/null || true
    cp -f dashcaddy-api/Dockerfile "$staging/dashcaddy-api/"
    cp -f dashcaddy-api/openapi.yaml "$staging/dashcaddy-api/" 2>/dev/null || true

    # Dashboard files
    cp -f status/index.html "$staging/status/"
    cp -f status/sw.js "$staging/status/" 2>/dev/null || true
    # FIX: use an explicit "if" — under "set -e" the bare
    # "[ -d ... ] && cp ..." list aborts the whole script when a directory
    # is missing, because the failed list is the statement's exit status.
    for dir in css js dist vendor assets; do
        if [ -d "status/${dir}" ]; then
            cp -rf "status/${dir}" "$staging/status/"
        fi
    done

    # Updater scripts
    cp -f dashcaddy-api/scripts/dashcaddy-update.sh "$staging/scripts/" 2>/dev/null || true
    cp -f dashcaddy-api/scripts/dashcaddy-updater.path "$staging/scripts/" 2>/dev/null || true
    cp -f dashcaddy-api/scripts/dashcaddy-updater.service "$staging/scripts/" 2>/dev/null || true

    # 5. Create tarball
    local tarball="dashcaddy-${version}.tar.gz"
    cd "$BUILD_DIR"
    tar czf "$tarball" dashcaddy/
    log "Tarball: ${tarball} ($(du -h "$tarball" | cut -f1))"

    # 6. Compute SHA-256 (verified by the instance-side self-updater)
    local sha256
    sha256=$(sha256sum "$tarball" | cut -d' ' -f1)
    log "SHA-256: ${sha256}"

    # 7. Write version.json
    # FIX: JSON-encode the changelog by piping it to python3 on stdin.
    # The previous triple-quoted interpolation produced broken JSON (or
    # allowed code injection into the python3 -c string) whenever a commit
    # subject contained quotes, backslashes, or ''' sequences.
    local changelog_json
    changelog_json=$(printf '%s' "$changelog" | python3 -c "import json,sys; print(json.dumps(sys.stdin.read()))")
    cat > version.json <<EOF
{
  "version": "${version}",
  "commit": "${commit}",
  "date": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
  "sha256": "${sha256}",
  "changelog": ${changelog_json},
  "breaking": false,
  "tarball": "${tarball}"
}
EOF

    # 8. Deploy to web root
    mkdir -p "$RELEASE_DIR"
    cp -f "$tarball" "$RELEASE_DIR/"
    cp -f version.json "$RELEASE_DIR/"
    # Also keep a "latest" copy
    cp -f "$tarball" "$RELEASE_DIR/latest.tar.gz"
    log "Deployed to ${RELEASE_DIR}"

    # 9. Sync to mirror (Contabo DE) — non-fatal on failure
    if ssh -o ConnectTimeout=5 "$MIRROR_HOST" true 2>/dev/null; then
        log "Syncing to mirror..."
        # FIX: only report success when rsync actually succeeded; previously
        # "Mirror synced" was logged even after the failure branch ran.
        if rsync -az --timeout=30 "$RELEASE_DIR/" "$MIRROR_HOST:/var/www/get2.dashcaddy.net/release/" 2>&1; then
            log "Mirror synced"
        else
            log "WARNING: Mirror sync failed (non-fatal)"
        fi
    else
        log "WARNING: Mirror host unreachable, skipping sync"
    fi

    log "=== Release build complete: v${version} (${commit}) ==="
}

main "$@"
|
||||
219
dashcaddy-api/scripts/dashcaddy-update.sh
Normal file
219
dashcaddy-api/scripts/dashcaddy-update.sh
Normal file
@@ -0,0 +1,219 @@
|
||||
#!/usr/bin/env bash
# DashCaddy Host-Side Updater
# Triggered by systemd path unit when the API container writes trigger.json.
# Handles API container rebuild + restart with automatic rollback on failure.

set -euo pipefail

readonly UPDATES_DIR="/opt/dashcaddy/updates"
readonly TRIGGER_FILE="${UPDATES_DIR}/trigger.json"
readonly RESULT_FILE="${UPDATES_DIR}/result.json"
readonly BACKUP_BASE="${UPDATES_DIR}/backups"
readonly HEALTH_URL="http://localhost:3001/health"
readonly HEALTH_TIMEOUT=60
readonly MAX_BACKUPS=3

log() { echo "[dashcaddy-update] $(date '+%Y-%m-%d %H:%M:%S') $*"; }

# Write result.json, which the container-side self-updater reads after
# restart to close out its pending history entry.
# Args: success (JSON bool literal) message version duration_ms
write_result() {
    local success="$1" message="$2" version="$3" duration="$4"
    cat > "$RESULT_FILE" <<EOF
{
  "success": ${success},
  "version": "${version}",
  "message": "${message}",
  "duration": ${duration},
  "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
}
EOF
    log "Result written: success=${success} version=${version}"
}

# Snapshot the currently-deployed API source so we can roll back later.
backup_current() {
    local api_dir="$1" version="$2"
    local backup_dir="${BACKUP_BASE}/${version}"
    mkdir -p "$backup_dir/routes"

    cp -f "${api_dir}"/*.js "$backup_dir/" 2>/dev/null || true
    cp -rf "${api_dir}/routes/"* "$backup_dir/routes/" 2>/dev/null || true
    cp -f "${api_dir}/package.json" "$backup_dir/" 2>/dev/null || true
    cp -f "${api_dir}/package-lock.json" "$backup_dir/" 2>/dev/null || true
    cp -f "${api_dir}/Dockerfile" "$backup_dir/" 2>/dev/null || true
    cp -f "${api_dir}/openapi.yaml" "$backup_dir/" 2>/dev/null || true

    log "Backed up version ${version} to ${backup_dir}"
}

# Restore a previously-backed-up version into the live API source dir.
restore_backup() {
    local api_dir="$1" version="$2"
    local backup_dir="${BACKUP_BASE}/${version}"

    if [ ! -d "$backup_dir" ]; then
        log "ERROR: No backup found for version ${version}"
        return 1
    fi

    cp -f "${backup_dir}"/*.js "$api_dir/" 2>/dev/null || true
    cp -rf "${backup_dir}/routes/"* "${api_dir}/routes/" 2>/dev/null || true
    cp -f "${backup_dir}/package.json" "$api_dir/" 2>/dev/null || true
    cp -f "${backup_dir}/package-lock.json" "$api_dir/" 2>/dev/null || true
    cp -f "${backup_dir}/Dockerfile" "$api_dir/" 2>/dev/null || true
    cp -f "${backup_dir}/openapi.yaml" "$api_dir/" 2>/dev/null || true

    log "Restored version ${version} from ${backup_dir}"
}

# Copy staged update files over the live API source.
copy_new_files() {
    local staging_dir="$1" api_dir="$2"

    cp -f "${staging_dir}"/*.js "$api_dir/" 2>/dev/null || true
    [ -d "${staging_dir}/routes" ] && cp -rf "${staging_dir}/routes/"* "${api_dir}/routes/" 2>/dev/null || true
    cp -f "${staging_dir}/package.json" "$api_dir/" 2>/dev/null || true
    cp -f "${staging_dir}/package-lock.json" "$api_dir/" 2>/dev/null || true
    cp -f "${staging_dir}/Dockerfile" "$api_dir/" 2>/dev/null || true
    cp -f "${staging_dir}/openapi.yaml" "$api_dir/" 2>/dev/null || true

    log "Copied new files from ${staging_dir} to ${api_dir}"
}

# Poll the API /health endpoint every 2s until it responds or the
# HEALTH_TIMEOUT budget is exhausted.
wait_for_health() {
    local attempt=0
    local max_attempts=$((HEALTH_TIMEOUT / 2))

    while (( attempt < max_attempts )); do
        if curl -fsS --max-time 3 "$HEALTH_URL" >/dev/null 2>&1; then
            log "Health check passed (attempt $((attempt+1)))"
            return 0
        fi
        sleep 2
        attempt=$((attempt + 1))
    done

    log "Health check FAILED after ${HEALTH_TIMEOUT}s"
    return 1
}

# Locate the docker-compose project directory for the API container.
find_compose_dir() {
    for dir in /etc/dashcaddy/sites/dashcaddy-api /etc/dashcaddy/sites/caddy-api; do
        if [ -f "${dir}/docker-compose.yml" ] || [ -f "${dir}/docker-compose.yaml" ]; then
            echo "$dir"
            return 0
        fi
    done
    # Fallback: same as api source
    echo "$1"
}

# Keep only the MAX_BACKUPS most recent backup directories.
cleanup_old_backups() {
    if [ ! -d "$BACKUP_BASE" ]; then return; fi
    local count
    count=$(ls -1d "${BACKUP_BASE}"/*/ 2>/dev/null | wc -l)
    if (( count > MAX_BACKUPS )); then
        local to_remove=$((count - MAX_BACKUPS))
        # FIX: select victims by modification time ("ls -t" lists newest
        # first, so "tail" yields the oldest). The previous lexicographic
        # "ls | head" could delete the NEWEST backup — e.g. "1.10.0" sorts
        # before "1.9.0".
        ls -1dt "${BACKUP_BASE}"/*/ 2>/dev/null | tail -n "$to_remove" | while read -r dir; do
            rm -rf "$dir"
            log "Cleaned old backup: $dir"
        done
    fi
}

# Read one key from trigger.json, echoing a default on any parse failure.
# (Replaces six copy-pasted python3 one-liners in main.)
trigger_get() {
    local key="$1" default="$2"
    python3 -c "import json; d=json.load(open('$TRIGGER_FILE')); print(d.get('$key','$default'))" 2>/dev/null || echo "$default"
}

main() {
    if [ ! -f "$TRIGGER_FILE" ]; then
        log "No trigger file found, exiting"
        exit 0
    fi

    local start_time
    start_time=$(date +%s)

    # Parse trigger file (written by the container-side self-updater)
    local action version from_version staging_dir api_dir commit
    action=$(trigger_get action update)
    version=$(trigger_get version unknown)
    from_version=$(trigger_get fromVersion unknown)
    staging_dir=$(trigger_get stagingDir "")
    api_dir=$(trigger_get apiSourceDir /opt/dashcaddy)
    commit=$(trigger_get commit unknown)

    log "=== DashCaddy ${action} started: ${from_version} → ${version} (${commit}) ==="

    if [ -z "$staging_dir" ] || [ ! -d "$staging_dir" ]; then
        log "ERROR: Staging directory not found: ${staging_dir}"
        write_result "false" "Staging directory not found" "$version" "0"
        rm -f "$TRIGGER_FILE"
        exit 1
    fi

    local compose_dir
    compose_dir=$(find_compose_dir "$api_dir")

    # Step 1: Backup current version
    log "Step 1: Backing up current version (${from_version})"
    backup_current "$api_dir" "$from_version"

    # Step 2: Copy new files
    log "Step 2: Copying new files"
    copy_new_files "$staging_dir" "$api_dir"

    # Write commit hash to VERSION file (consumed as a Docker build arg)
    echo "$commit" > "${api_dir}/VERSION"

    # Step 3: Rebuild container
    log "Step 3: Building new container image"
    cd "$compose_dir"
    if ! DASHCADDY_COMMIT="$commit" docker compose build --quiet 2>&1; then
        log "ERROR: docker compose build failed, rolling back"
        restore_backup "$api_dir" "$from_version"
        local elapsed=$(( $(date +%s) - start_time ))
        write_result "false" "Build failed, rolled back to ${from_version}" "$version" "$((elapsed * 1000))"
        rm -f "$TRIGGER_FILE"
        exit 1
    fi

    # Step 4: Restart container
    log "Step 4: Restarting container"
    if ! docker compose up -d 2>&1; then
        log "ERROR: docker compose up failed, rolling back"
        restore_backup "$api_dir" "$from_version"
        docker compose build --quiet 2>&1 || true
        docker compose up -d 2>&1 || true
        local elapsed=$(( $(date +%s) - start_time ))
        write_result "false" "Container start failed, rolled back to ${from_version}" "$version" "$((elapsed * 1000))"
        rm -f "$TRIGGER_FILE"
        exit 1
    fi

    # Step 5: Health check — on failure, restore backup, rebuild, restart
    log "Step 5: Waiting for health check (${HEALTH_TIMEOUT}s timeout)"
    if wait_for_health; then
        local elapsed=$(( $(date +%s) - start_time ))
        log "=== Update to ${version} SUCCESSFUL (${elapsed}s) ==="
        write_result "true" "Update successful" "$version" "$((elapsed * 1000))"
    else
        log "Health check failed — ROLLING BACK to ${from_version}"
        restore_backup "$api_dir" "$from_version"
        cd "$compose_dir"
        docker compose build --quiet 2>&1 || true
        docker compose up -d 2>&1 || true

        if wait_for_health; then
            local elapsed=$(( $(date +%s) - start_time ))
            log "Rollback to ${from_version} succeeded"
            write_result "false" "Health check failed after update. Rolled back to ${from_version}." "$version" "$((elapsed * 1000))"
        else
            local elapsed=$(( $(date +%s) - start_time ))
            log "CRITICAL: Rollback also failed. Manual intervention required."
            write_result "false" "CRITICAL: Both update and rollback failed. Manual intervention required." "$version" "$((elapsed * 1000))"
        fi
    fi

    # Cleanup (trigger removal also stops the path unit from re-firing)
    rm -f "$TRIGGER_FILE"
    rm -rf "${UPDATES_DIR}/staging"
    cleanup_old_backups

    log "Update process complete"
}

main "$@"
|
||||
10
dashcaddy-api/scripts/dashcaddy-updater.path
Normal file
10
dashcaddy-api/scripts/dashcaddy-updater.path
Normal file
@@ -0,0 +1,10 @@
|
||||
[Unit]
|
||||
Description=Watch for DashCaddy update trigger
|
||||
Documentation=https://dashcaddy.net
|
||||
|
||||
[Path]
|
||||
PathChanged=/opt/dashcaddy/updates/trigger.json
|
||||
MakeDirectory=yes
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
13
dashcaddy-api/scripts/dashcaddy-updater.service
Normal file
13
dashcaddy-api/scripts/dashcaddy-updater.service
Normal file
@@ -0,0 +1,13 @@
|
||||
[Unit]
|
||||
Description=DashCaddy auto-update handler
|
||||
Documentation=https://dashcaddy.net
|
||||
After=docker.service
|
||||
Requires=docker.service
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
ExecStart=/opt/dashcaddy/scripts/dashcaddy-update.sh
|
||||
TimeoutStartSec=300
|
||||
StandardOutput=journal
|
||||
StandardError=journal
|
||||
SyslogIdentifier=dashcaddy-update
|
||||
136
dashcaddy-api/scripts/webhook-handler.js
Normal file
136
dashcaddy-api/scripts/webhook-handler.js
Normal file
@@ -0,0 +1,136 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* DashCaddy Release Webhook Handler
|
||||
* Receives push webhooks from Gitea, verifies HMAC signature,
|
||||
* and triggers build-release.sh.
|
||||
*
|
||||
* Usage: node webhook-handler.js
|
||||
* Env vars:
|
||||
* WEBHOOK_SECRET — Gitea webhook secret (required)
|
||||
* WEBHOOK_PORT — Listen port (default: 9090)
|
||||
* BUILD_SCRIPT — Path to build script (default: /opt/dashcaddy-release/build-release.sh)
|
||||
*/
|
||||
|
||||
const http = require('http');
|
||||
const crypto = require('crypto');
|
||||
const { spawn } = require('child_process');
|
||||
const fs = require('fs');
|
||||
|
||||
const PORT = parseInt(process.env.WEBHOOK_PORT || '9090', 10);
|
||||
const SECRET = process.env.WEBHOOK_SECRET;
|
||||
const BUILD_SCRIPT = process.env.BUILD_SCRIPT || '/opt/dashcaddy-release/build-release.sh';
|
||||
const LOG_FILE = '/var/log/dashcaddy-release.log';
|
||||
|
||||
if (!SECRET) {
|
||||
console.error('WEBHOOK_SECRET environment variable is required');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
let buildRunning = false;
|
||||
|
||||
/** Append a timestamped line to stdout and the persistent release log. */
function log(msg) {
  const stamped = `[webhook] ${new Date().toISOString()} ${msg}`;
  console.log(stamped);
  fs.appendFileSync(LOG_FILE, `${stamped}\n`);
}
|
||||
|
||||
/**
 * Verify a Gitea webhook HMAC-SHA256 signature against the raw body.
 * @param {string|Buffer} body - Raw request body as received.
 * @param {string} signature - Hex digest from the X-Gitea-Signature header.
 * @returns {boolean} true only when the signature matches.
 */
function verifySignature(body, signature) {
  if (!signature) return false;
  const expected = crypto.createHmac('sha256', SECRET).update(body).digest('hex');
  const sigBuf = Buffer.from(signature);
  const expBuf = Buffer.from(expected);
  // FIX: crypto.timingSafeEqual THROWS when buffer lengths differ, so a
  // malformed or truncated signature used to crash the request handler
  // instead of being rejected. Length is not secret, so this early return
  // does not weaken the timing-safe comparison.
  if (sigBuf.length !== expBuf.length) return false;
  return crypto.timingSafeEqual(sigBuf, expBuf);
}
|
||||
|
||||
/**
 * Launch build-release.sh in the background, streaming its stdout/stderr
 * into the log. Concurrent triggers are coalesced: while a build is
 * running, new requests are ignored.
 */
function triggerBuild() {
  if (buildRunning) {
    log('Build already in progress, skipping');
    return;
  }
  buildRunning = true;
  log('Triggering build...');

  const child = spawn('bash', [BUILD_SCRIPT], {
    stdio: ['ignore', 'pipe', 'pipe'],
    env: { ...process.env, PATH: process.env.PATH },
  });

  // Relay each output line into the shared log with a stream prefix.
  const relay = (prefix) => (chunk) => {
    for (const line of chunk.toString().trim().split('\n')) {
      log(`${prefix} ${line}`);
    }
  };
  child.stdout.on('data', relay('[build]'));
  child.stderr.on('data', relay('[build:err]'));

  child.on('close', (code) => {
    buildRunning = false;
    if (code === 0) {
      log('Build completed successfully');
    } else {
      log(`Build FAILED with exit code ${code}`);
    }
  });
}
|
||||
|
||||
// HTTP entry point. Routes:
//   GET  /health  — liveness probe (reports whether a build is running)
//   POST /webhook — Gitea push webhook, HMAC-verified before parsing
const server = http.createServer((req, res) => {
  // Health check
  if (req.method === 'GET' && req.url === '/health') {
    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ status: 'ok', buildRunning }));
    return;
  }

  // Only accept POST to /webhook
  if (req.method !== 'POST' || req.url !== '/webhook') {
    res.writeHead(404);
    res.end('Not found');
    return;
  }

  // FIX: cap the buffered body. This endpoint receives untrusted network
  // input; previously a client could stream an unbounded payload into
  // memory before signature verification ever ran.
  const MAX_BODY_BYTES = 1024 * 1024; // ample for a Gitea push payload
  let body = '';
  let rejected = false;
  req.on('data', chunk => {
    if (rejected) return;
    body += chunk;
    if (body.length > MAX_BODY_BYTES) {
      rejected = true;
      log('Rejected oversized webhook payload');
      res.writeHead(413);
      res.end('Payload too large');
      req.destroy();
    }
  });
  req.on('end', () => {
    if (rejected) return;

    // Verify Gitea HMAC signature
    const sig = req.headers['x-gitea-signature'] || '';
    if (!verifySignature(body, sig)) {
      log('Signature verification FAILED');
      res.writeHead(403);
      res.end('Invalid signature');
      return;
    }

    try {
      const payload = JSON.parse(body);
      const ref = payload.ref || '';
      const branch = ref.replace('refs/heads/', '');

      if (branch !== 'main') {
        log(`Ignoring push to ${branch} (not main)`);
        res.writeHead(200);
        res.end('Ignored (not main branch)');
        return;
      }

      const pusher = payload.pusher?.login || 'unknown';
      const commits = payload.commits?.length || 0;
      log(`Push to main by ${pusher}: ${commits} commit(s)`);

      triggerBuild();

      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ accepted: true }));
    } catch (e) {
      log('Failed to parse webhook payload: ' + e.message);
      res.writeHead(400);
      res.end('Invalid payload');
    }
  });
});
|
||||
|
||||
server.listen(PORT, '0.0.0.0', () => {
  // FIX: log the address we actually bind. The previous message claimed
  // 127.0.0.1 while binding all interfaces, misleading operators about
  // the handler's network exposure.
  log(`Webhook handler listening on 0.0.0.0:${PORT}`);
});
|
||||
515
dashcaddy-api/self-updater.js
Normal file
515
dashcaddy-api/self-updater.js
Normal file
@@ -0,0 +1,515 @@
|
||||
/**
|
||||
* DashCaddy Self-Updater
|
||||
* Polls for new versions, downloads and stages updates,
|
||||
* triggers host-side updater for API container rebuilds.
|
||||
*
|
||||
* Frontend files are updated directly (zero-downtime).
|
||||
* API files require a container rebuild via the host-side systemd service.
|
||||
*/
|
||||
|
||||
const EventEmitter = require('events');
|
||||
const https = require('https');
|
||||
const http = require('http');
|
||||
const fs = require('fs');
|
||||
const fsp = require('fs').promises;
|
||||
const path = require('path');
|
||||
const crypto = require('crypto');
|
||||
const { execSync } = require('child_process');
|
||||
const zlib = require('zlib');
|
||||
|
||||
const isWindows = process.platform === 'win32';
|
||||
|
||||
const DEFAULTS = {
|
||||
CHECK_INTERVAL: 30 * 60 * 1000, // 30 minutes
|
||||
UPDATE_URL: 'https://get.dashcaddy.net/release',
|
||||
MIRROR_URL: 'https://get2.dashcaddy.net/release',
|
||||
UPDATES_DIR: isWindows ? 'C:/caddy/updates' : '/app/updates',
|
||||
// API_SOURCE_DIR is the HOST path — written to trigger.json for the host-side updater
|
||||
API_SOURCE_DIR: isWindows ? 'C:/caddy/sites/dashcaddy-api' : '/etc/dashcaddy/sites/dashcaddy-api',
|
||||
// FRONTEND_DIR is the container path — dashboard is volume-mounted at /app/dashboard
|
||||
FRONTEND_DIR: isWindows ? 'C:/caddy/sites/status' : '/app/dashboard',
|
||||
MAX_BACKUPS: 3,
|
||||
HEALTH_TIMEOUT: 60000,
|
||||
DOWNLOAD_TIMEOUT: 120000,
|
||||
};
|
||||
|
||||
class SelfUpdater extends EventEmitter {
|
||||
constructor(options = {}) {
|
||||
super();
|
||||
this.config = {
|
||||
enabled: options.enabled !== false,
|
||||
checkInterval: parseInt(options.checkInterval || DEFAULTS.CHECK_INTERVAL, 10),
|
||||
updateUrl: options.updateUrl || DEFAULTS.UPDATE_URL,
|
||||
mirrorUrl: options.mirrorUrl || DEFAULTS.MIRROR_URL,
|
||||
updatesDir: options.updatesDir || DEFAULTS.UPDATES_DIR,
|
||||
apiSourceDir: options.apiSourceDir || DEFAULTS.API_SOURCE_DIR,
|
||||
frontendDir: options.frontendDir || DEFAULTS.FRONTEND_DIR,
|
||||
maxBackups: parseInt(options.maxBackups || DEFAULTS.MAX_BACKUPS, 10),
|
||||
};
|
||||
|
||||
this.status = 'idle'; // idle | checking | downloading | applying | waiting
|
||||
this.checkTimer = null;
|
||||
this.lastCheckTime = null;
|
||||
this.lastCheckResult = null;
|
||||
|
||||
// Ensure directories exist
|
||||
this._ensureDirs();
|
||||
}
|
||||
|
||||
// ── Lifecycle ──
|
||||
|
||||
start() {
|
||||
if (!this.config.enabled || this.checkTimer) return;
|
||||
|
||||
console.log('[SelfUpdater] Starting auto-update checks every %ds', this.config.checkInterval / 1000);
|
||||
|
||||
// First check after a short delay (let server finish startup)
|
||||
setTimeout(() => {
|
||||
this._autoCheckAndApply();
|
||||
this.checkTimer = setInterval(() => this._autoCheckAndApply(), this.config.checkInterval);
|
||||
}, 15000);
|
||||
}
|
||||
|
||||
stop() {
|
||||
if (this.checkTimer) {
|
||||
clearInterval(this.checkTimer);
|
||||
this.checkTimer = null;
|
||||
}
|
||||
}
|
||||
|
||||
// ── Version Info ──
|
||||
|
||||
getLocalVersion() {
|
||||
try {
|
||||
const pkg = JSON.parse(fs.readFileSync(path.join(__dirname, 'package.json'), 'utf8'));
|
||||
let commit = null;
|
||||
try {
|
||||
commit = fs.readFileSync(path.join(__dirname, 'VERSION'), 'utf8').trim();
|
||||
} catch (_) {}
|
||||
return { version: pkg.version, commit };
|
||||
} catch (e) {
|
||||
return { version: '0.0.0', commit: null };
|
||||
}
|
||||
}
|
||||
|
||||
/** @returns {string} Current updater state: 'idle' | 'checking' | 'downloading' | 'applying' | 'waiting'. */
getStatus() {
  return this.status;
}
|
||||
|
||||
// ── Check for Updates ──
|
||||
|
||||
async checkForUpdate() {
|
||||
this.status = 'checking';
|
||||
try {
|
||||
let remote;
|
||||
try {
|
||||
remote = await this._fetchJson(`${this.config.updateUrl}/version.json`);
|
||||
} catch (primaryErr) {
|
||||
console.warn('[SelfUpdater] Primary server failed:', primaryErr.message, '— trying mirror');
|
||||
try {
|
||||
remote = await this._fetchJson(`${this.config.mirrorUrl}/version.json`);
|
||||
} catch (mirrorErr) {
|
||||
this.status = 'idle';
|
||||
this.lastCheckTime = Date.now();
|
||||
this.lastCheckResult = { available: false, error: 'Update servers unreachable' };
|
||||
return this.lastCheckResult;
|
||||
}
|
||||
}
|
||||
|
||||
const local = this.getLocalVersion();
|
||||
const available = this._isNewer(local, remote);
|
||||
|
||||
this.lastCheckTime = Date.now();
|
||||
this.lastCheckResult = { available, local, remote };
|
||||
this.status = 'idle';
|
||||
|
||||
if (available) {
|
||||
this.emit('update-available', remote);
|
||||
}
|
||||
|
||||
return this.lastCheckResult;
|
||||
} catch (e) {
|
||||
this.status = 'idle';
|
||||
this.lastCheckTime = Date.now();
|
||||
this.lastCheckResult = { available: false, error: e.message };
|
||||
return this.lastCheckResult;
|
||||
}
|
||||
}
|
||||
|
||||
// ── Apply Update ──
|
||||
|
||||
/**
 * Download, verify, and apply the release described by `remoteInfo`
 * (an object from version.json: { version, commit, sha256, tarball, ... }).
 *
 * Frontend files are copied into place directly (zero downtime). API files
 * are staged and handed to the host-side systemd updater via trigger.json;
 * on Linux this method leaves status = 'waiting' and the container is
 * expected to be rebuilt/restarted out-of-band.
 *
 * @param {object} remoteInfo - Remote release metadata.
 * @returns {Promise<object>} Summary { success, fromVersion, toVersion,
 *   frontendUpdated, apiUpdated }.
 * @throws On download, checksum, or extraction failure (recorded in history
 *   as status 'failed' before rethrowing).
 */
async applyUpdate(remoteInfo) {
  // Allow entry from 'checking' because checkForUpdate() may have just run.
  if (this.status !== 'idle' && this.status !== 'checking') {
    throw new Error(`Update already in progress (status: ${this.status})`);
  }

  const local = this.getLocalVersion();
  const stagingDir = path.join(this.config.updatesDir, 'staging');

  try {
    // 1. Download
    this.status = 'downloading';
    this.emit('update-progress', { step: 'downloading', version: remoteInfo.version });

    const tarballUrl = `${this.config.updateUrl}/${remoteInfo.tarball}`;
    const tarballPath = path.join(this.config.updatesDir, remoteInfo.tarball);
    await this._downloadFile(tarballUrl, tarballPath);

    // 2. Verify SHA-256 before trusting the archive contents.
    const hash = await this._computeSha256(tarballPath);
    if (hash !== remoteInfo.sha256) {
      await fsp.unlink(tarballPath).catch(() => {});
      throw new Error(`SHA-256 mismatch: expected ${remoteInfo.sha256}, got ${hash}`);
    }

    // 3. Extract into a freshly-cleared staging directory.
    this.status = 'applying';
    this.emit('update-progress', { step: 'extracting', version: remoteInfo.version });

    await this._cleanDir(stagingDir);
    await this._extractTarball(tarballPath, stagingDir);

    // 4. Apply frontend files directly (zero-downtime)
    const frontendSrc = this._findDir(stagingDir, 'status');
    if (frontendSrc) {
      await this._copyDir(frontendSrc, this.config.frontendDir, [
        'dist', 'css', 'assets', 'vendor', 'index.html', 'sw.js'
      ]);
      this.emit('update-progress', { step: 'frontend-updated', version: remoteInfo.version });
    }

    // 5. Trigger API rebuild (Linux only — host-side systemd service)
    const apiSrc = this._findDir(stagingDir, 'dashcaddy-api');
    if (apiSrc && !isWindows) {
      this.status = 'waiting';
      this.emit('update-progress', { step: 'triggering-rebuild', version: remoteInfo.version });

      // NOTE(review): stagingDir here is the CONTAINER path under
      // updatesDir; the host-side script reads it as-is — confirm the
      // updates volume is mounted at the same path on the host.
      const trigger = {
        action: 'update',
        version: remoteInfo.version,
        commit: remoteInfo.commit,
        fromVersion: local.version,
        stagingDir: apiSrc,
        apiSourceDir: this.config.apiSourceDir,
        timestamp: new Date().toISOString(),
      };
      await fsp.writeFile(
        path.join(this.config.updatesDir, 'trigger.json'),
        JSON.stringify(trigger, null, 2)
      );

      // The host-side systemd service will handle the rest.
      // After container restart, checkPostUpdateResult() reads the result.
      this._addToHistory({
        version: remoteInfo.version,
        fromVersion: local.version,
        timestamp: new Date().toISOString(),
        status: 'pending',
        frontendUpdated: !!frontendSrc,
        apiUpdated: true,
      });
    } else if (isWindows) {
      // Windows: frontend updated, API needs manual restart
      this._addToHistory({
        version: remoteInfo.version,
        fromVersion: local.version,
        timestamp: new Date().toISOString(),
        status: 'partial',
        frontendUpdated: !!frontendSrc,
        apiUpdated: false,
        note: 'API update requires manual container restart on Windows',
      });
      this.status = 'idle';
    }

    // Clean up tarball (staging is removed later by the host-side script)
    await fsp.unlink(tarballPath).catch(() => {});

    return {
      success: true,
      fromVersion: local.version,
      toVersion: remoteInfo.version,
      frontendUpdated: !!frontendSrc,
      apiUpdated: !isWindows && !!apiSrc,
    };
  } catch (e) {
    // Any failure resets the state machine and is recorded before rethrow.
    this.status = 'idle';
    this._addToHistory({
      version: remoteInfo.version,
      fromVersion: local.version,
      timestamp: new Date().toISOString(),
      status: 'failed',
      error: e.message,
    });
    throw e;
  }
}
|
||||
|
||||
// ── Post-Update Result ──
|
||||
|
||||
async checkPostUpdateResult() {
|
||||
const resultPath = path.join(this.config.updatesDir, 'result.json');
|
||||
try {
|
||||
const data = await fsp.readFile(resultPath, 'utf8');
|
||||
const result = JSON.parse(data);
|
||||
// Delete the result file so we don't process it again
|
||||
await fsp.unlink(resultPath).catch(() => {});
|
||||
|
||||
// Update history
|
||||
const history = this.getUpdateHistory();
|
||||
const pending = history.find(h => h.status === 'pending');
|
||||
if (pending) {
|
||||
pending.status = result.success ? 'success' : 'rolled-back';
|
||||
pending.duration = result.duration;
|
||||
if (result.error) pending.error = result.error;
|
||||
this._saveHistory(history);
|
||||
}
|
||||
|
||||
this.status = 'idle';
|
||||
return result;
|
||||
} catch (_) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// ── Rollback ──
|
||||
|
||||
async rollbackToVersion(version) {
|
||||
if (isWindows) throw new Error('Auto-rollback not supported on Windows');
|
||||
|
||||
const backupDir = path.join(this.config.updatesDir, 'backups', version);
|
||||
try {
|
||||
await fsp.access(backupDir);
|
||||
} catch (_) {
|
||||
throw new Error(`No backup found for version ${version}`);
|
||||
}
|
||||
|
||||
const local = this.getLocalVersion();
|
||||
const trigger = {
|
||||
action: 'rollback',
|
||||
version: version,
|
||||
fromVersion: local.version,
|
||||
stagingDir: backupDir,
|
||||
apiSourceDir: this.config.apiSourceDir,
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
|
||||
this.status = 'waiting';
|
||||
await fsp.writeFile(
|
||||
path.join(this.config.updatesDir, 'trigger.json'),
|
||||
JSON.stringify(trigger, null, 2)
|
||||
);
|
||||
|
||||
this._addToHistory({
|
||||
version: version,
|
||||
fromVersion: local.version,
|
||||
timestamp: new Date().toISOString(),
|
||||
status: 'pending',
|
||||
rollback: true,
|
||||
});
|
||||
}
|
||||
|
||||
getAvailableRollbacks() {
|
||||
const backupsDir = path.join(this.config.updatesDir, 'backups');
|
||||
try {
|
||||
return fs.readdirSync(backupsDir)
|
||||
.filter(d => fs.statSync(path.join(backupsDir, d)).isDirectory())
|
||||
.sort()
|
||||
.reverse();
|
||||
} catch (_) {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
// ── History ──
|
||||
|
||||
getUpdateHistory() {
|
||||
const historyPath = path.join(this.config.updatesDir, 'self-update-history.json');
|
||||
try {
|
||||
return JSON.parse(fs.readFileSync(historyPath, 'utf8'));
|
||||
} catch (_) {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
// ── Private Methods ──
|
||||
|
||||
async _autoCheckAndApply() {
|
||||
try {
|
||||
const result = await this.checkForUpdate();
|
||||
if (result.available && result.remote) {
|
||||
console.log('[SelfUpdater] Update available: %s → %s', result.local.version, result.remote.version);
|
||||
await this.applyUpdate(result.remote);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('[SelfUpdater] Auto-update error:', e.message);
|
||||
}
|
||||
}
|
||||
|
||||
_isNewer(local, remote) {
|
||||
if (!remote || !remote.version) return false;
|
||||
// Compare semver: split into [major, minor, patch]
|
||||
const lv = (local.version || '0.0.0').split('.').map(Number);
|
||||
const rv = remote.version.split('.').map(Number);
|
||||
for (let i = 0; i < 3; i++) {
|
||||
if ((rv[i] || 0) > (lv[i] || 0)) return true;
|
||||
if ((rv[i] || 0) < (lv[i] || 0)) return false;
|
||||
}
|
||||
// Same version — check commit hash
|
||||
if (remote.commit && local.commit && remote.commit !== local.commit) return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
_addToHistory(entry) {
|
||||
const history = this.getUpdateHistory();
|
||||
history.unshift(entry);
|
||||
// Keep last 50 entries
|
||||
if (history.length > 50) history.length = 50;
|
||||
this._saveHistory(history);
|
||||
}
|
||||
|
||||
_saveHistory(history) {
|
||||
const historyPath = path.join(this.config.updatesDir, 'self-update-history.json');
|
||||
try {
|
||||
fs.writeFileSync(historyPath, JSON.stringify(history, null, 2));
|
||||
} catch (e) {
|
||||
console.error('[SelfUpdater] Failed to save history:', e.message);
|
||||
}
|
||||
}
|
||||
|
||||
async _ensureDirs() {
|
||||
for (const dir of [this.config.updatesDir, path.join(this.config.updatesDir, 'staging'), path.join(this.config.updatesDir, 'backups')]) {
|
||||
await fsp.mkdir(dir, { recursive: true }).catch(() => {});
|
||||
}
|
||||
}
|
||||
|
||||
async _fetchJson(url) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const mod = url.startsWith('https') ? https : http;
|
||||
const req = mod.get(url, { timeout: 15000 }, (res) => {
|
||||
if (res.statusCode !== 200) {
|
||||
res.resume();
|
||||
return reject(new Error(`HTTP ${res.statusCode} from ${url}`));
|
||||
}
|
||||
let data = '';
|
||||
res.on('data', chunk => data += chunk);
|
||||
res.on('end', () => {
|
||||
try {
|
||||
resolve(JSON.parse(data));
|
||||
} catch (e) {
|
||||
reject(new Error('Invalid JSON from ' + url));
|
||||
}
|
||||
});
|
||||
});
|
||||
req.on('error', reject);
|
||||
req.on('timeout', () => { req.destroy(); reject(new Error('Timeout fetching ' + url)); });
|
||||
});
|
||||
}
|
||||
|
||||
async _downloadFile(url, dest) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const mod = url.startsWith('https') ? https : http;
|
||||
const file = fs.createWriteStream(dest);
|
||||
const req = mod.get(url, { timeout: DEFAULTS.DOWNLOAD_TIMEOUT }, (res) => {
|
||||
if (res.statusCode !== 200) {
|
||||
file.close();
|
||||
fs.unlinkSync(dest);
|
||||
return reject(new Error(`HTTP ${res.statusCode} downloading ${url}`));
|
||||
}
|
||||
res.pipe(file);
|
||||
file.on('finish', () => { file.close(resolve); });
|
||||
});
|
||||
req.on('error', (e) => {
|
||||
file.close();
|
||||
fs.unlink(dest, () => {});
|
||||
reject(e);
|
||||
});
|
||||
req.on('timeout', () => { req.destroy(); reject(new Error('Download timeout')); });
|
||||
});
|
||||
}
|
||||
|
||||
async _computeSha256(filePath) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const hash = crypto.createHash('sha256');
|
||||
const stream = fs.createReadStream(filePath);
|
||||
stream.on('data', chunk => hash.update(chunk));
|
||||
stream.on('end', () => resolve(hash.digest('hex')));
|
||||
stream.on('error', reject);
|
||||
});
|
||||
}
|
||||
|
||||
async _extractTarball(tarballPath, destDir) {
|
||||
await fsp.mkdir(destDir, { recursive: true });
|
||||
// Use tar command (available on Linux, and Git Bash on Windows)
|
||||
try {
|
||||
execSync(`tar xzf "${tarballPath}" -C "${destDir}" --strip-components=1`, { stdio: 'pipe' });
|
||||
} catch (e) {
|
||||
throw new Error('Failed to extract tarball: ' + e.message);
|
||||
}
|
||||
}
|
||||
|
||||
_findDir(baseDir, name) {
|
||||
const direct = path.join(baseDir, name);
|
||||
if (fs.existsSync(direct)) return direct;
|
||||
// Also check one level deeper (e.g., dashcaddy/dashcaddy-api)
|
||||
try {
|
||||
for (const entry of fs.readdirSync(baseDir)) {
|
||||
const sub = path.join(baseDir, entry, name);
|
||||
if (fs.existsSync(sub)) return sub;
|
||||
}
|
||||
} catch (_) {}
|
||||
return null;
|
||||
}
|
||||
|
||||
async _copyDir(src, dest, items) {
|
||||
await fsp.mkdir(dest, { recursive: true });
|
||||
for (const item of items) {
|
||||
const srcPath = path.join(src, item);
|
||||
const destPath = path.join(dest, item);
|
||||
try {
|
||||
const stat = await fsp.stat(srcPath);
|
||||
if (stat.isDirectory()) {
|
||||
await this._copyDirRecursive(srcPath, destPath);
|
||||
} else {
|
||||
await fsp.copyFile(srcPath, destPath);
|
||||
}
|
||||
} catch (_) {
|
||||
// Item may not exist in the update — skip
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async _copyDirRecursive(src, dest) {
|
||||
await fsp.mkdir(dest, { recursive: true });
|
||||
const entries = await fsp.readdir(src, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
const srcPath = path.join(src, entry.name);
|
||||
const destPath = path.join(dest, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
await this._copyDirRecursive(srcPath, destPath);
|
||||
} else {
|
||||
await fsp.copyFile(srcPath, destPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async _cleanDir(dir) {
|
||||
try {
|
||||
await fsp.rm(dir, { recursive: true, force: true });
|
||||
} catch (_) {}
|
||||
await fsp.mkdir(dir, { recursive: true });
|
||||
}
|
||||
}
|
||||
|
||||
// Singleton — one shared updater instance, configured from the environment.
// NOTE(review): checkInterval is passed straight through as an env string
// (or undefined); presumably the SelfUpdater constructor coerces it to a
// number and applies a default — confirm against the constructor.
const selfUpdater = new SelfUpdater({
  // Enabled unless explicitly set to the string "false"; unset means enabled.
  enabled: process.env.DASHCADDY_UPDATE_ENABLED !== 'false',
  checkInterval: process.env.DASHCADDY_UPDATE_INTERVAL,
  updateUrl: process.env.DASHCADDY_UPDATE_URL,
  mirrorUrl: process.env.DASHCADDY_MIRROR_URL,
  updatesDir: process.env.DASHCADDY_UPDATES_DIR,
  apiSourceDir: process.env.DASHCADDY_API_SOURCE_DIR,
  frontendDir: process.env.DASHCADDY_FRONTEND_DIR,
});

module.exports = selfUpdater;
|
||||
@@ -49,6 +49,7 @@ const resourceMonitor = require('./resource-monitor');
|
||||
const backupManager = require('./backup-manager');
|
||||
const healthChecker = require('./health-checker');
|
||||
const updateManager = require('./update-manager');
|
||||
const selfUpdater = require('./self-updater');
|
||||
const StateManager = require('./state-manager');
|
||||
const auditLogger = require('./audit-logger');
|
||||
const portLockManager = require('./port-lock-manager');
|
||||
@@ -1160,7 +1161,7 @@ Object.assign(ctx, {
|
||||
app, siteConfig, servicesStateManager, configStateManager,
|
||||
credentialManager, authManager, licenseManager,
|
||||
healthChecker, updateManager, backupManager, resourceMonitor,
|
||||
auditLogger, portLockManager,
|
||||
auditLogger, portLockManager, selfUpdater,
|
||||
APP_TEMPLATES, TEMPLATE_CATEGORIES, DIFFICULTY_LEVELS, RECIPE_TEMPLATES, RECIPE_CATEGORIES,
|
||||
asyncHandler, errorResponse, ok, fetchT, log, logError, safeErrorMessage,
|
||||
buildDomain, buildServiceUrl, getServiceById, readConfig, saveConfig, addServiceToConfig,
|
||||
@@ -1863,6 +1864,26 @@ const server = app.listen(PORT, '0.0.0.0', () => {
|
||||
} catch (error) {
|
||||
log.error('server', 'Update manager failed to start', { error: error.message });
|
||||
}
|
||||
|
||||
try {
|
||||
selfUpdater.start();
|
||||
log.info('server', 'Self-updater started', { interval: selfUpdater.config.checkInterval, url: selfUpdater.config.updateUrl });
|
||||
// Check for post-update result (did a previous update succeed or roll back?)
|
||||
selfUpdater.checkPostUpdateResult().then(result => {
|
||||
if (result) {
|
||||
log.info('server', 'Post-update result', result);
|
||||
if (typeof ctx.notification?.send === 'function') {
|
||||
ctx.notification.send('system.update',
|
||||
result.success ? 'DashCaddy Updated' : 'DashCaddy Update Failed',
|
||||
result.success ? `Updated to v${result.version}` : `Update failed: ${result.error || 'Unknown'}. Rolled back.`,
|
||||
result.success ? 'info' : 'error'
|
||||
);
|
||||
}
|
||||
}
|
||||
}).catch(() => {});
|
||||
} catch (error) {
|
||||
log.error('server', 'Self-updater failed to start', { error: error.message });
|
||||
}
|
||||
|
||||
// Tailscale API sync (if OAuth configured)
|
||||
if (tailscaleConfig.oauthConfigured) {
|
||||
@@ -1881,6 +1902,7 @@ function shutdown(signal) {
|
||||
backupManager.stop();
|
||||
healthChecker.stop();
|
||||
updateManager.stop();
|
||||
selfUpdater.stop();
|
||||
stopTailscaleSyncTimer();
|
||||
server.close(() => {
|
||||
log.info('shutdown', 'HTTP server closed');
|
||||
|
||||
Reference in New Issue
Block a user