Sync DNS2 production changes - removed obsolete test suite and refactored structure
This commit is contained in:
@@ -1,111 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# DashCaddy Release Builder
|
||||
# Triggered by Gitea webhook on push to main.
|
||||
# Clones repo, builds tarball, writes version.json, deploys to web root.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Repository to build from. The clone URL (including any access token) MUST
# come from the environment — hardcoding credentials in this script leaks
# them to anyone who can read the file, the tarball, or the repo history.
# Export DASHCADDY_REPO_URL before running (e.g. via a systemd EnvironmentFile);
# an empty value makes the git clone in main() fail loudly under `set -e`.
readonly REPO_URL="${DASHCADDY_REPO_URL:-}"
|
||||
# Web root where the tarball and version.json manifest are published.
readonly RELEASE_DIR="/var/www/get.dashcaddy.net/release"
# Per-run scratch directory ($$ = this shell's PID); removed by the EXIT trap.
readonly BUILD_DIR="/tmp/dashcaddy-build-$$"
# Mirror target for step 9 (rsync over ssh).
readonly MIRROR_HOST="root@100.98.123.59" # Contabo DE
# Only pushes to this branch produce a release.
readonly BRANCH="main"
|
||||
|
||||
# Timestamped logger: every message is tagged with the script name and local time.
log() {
  printf '[build-release] %s %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$*"
}
|
||||
|
||||
# Remove the scratch build directory; registered on EXIT so partial builds
# never accumulate under /tmp, whatever the exit path.
cleanup() {
  rm -rf "${BUILD_DIR}"
}
trap cleanup EXIT
|
||||
|
||||
#######################################
# Full release pipeline: clone → version → changelog → stage → tarball →
# checksum → version.json → deploy → mirror sync.
# Globals: REPO_URL, RELEASE_DIR, BUILD_DIR, MIRROR_HOST, BRANCH (read)
# Outputs: tarball + version.json under RELEASE_DIR; progress via log()
#######################################
main() {
  log "=== Starting release build ==="

  # 1. Clone latest (shallow — only the tip of the release branch is needed)
  mkdir -p "$BUILD_DIR"
  log "Cloning ${BRANCH}..."
  git clone --depth 1 --branch "$BRANCH" "$REPO_URL" "$BUILD_DIR/repo" 2>&1
  cd "$BUILD_DIR/repo"

  local commit
  commit=$(git rev-parse --short HEAD)
  log "Commit: ${commit}"

  # 2. Read version from package.json
  local version
  version=$(python3 -c "import json; print(json.load(open('dashcaddy-api/package.json'))['version'])")
  log "Version: ${version}"

  # 3. Build changelog (last 10 commits, one-liner)
  local changelog
  changelog=$(git log --oneline -10 --no-decorate 2>/dev/null || echo "${commit} (no log)")

  # JSON-encode the changelog through the environment. The previous
  # triple-quote interpolation broke on quotes/backslashes in commit
  # messages and let a crafted commit message inject Python code.
  local changelog_json
  changelog_json=$(CHANGELOG="$changelog" python3 -c 'import json, os; print(json.dumps(os.environ["CHANGELOG"]))')

  # 4. Assemble tarball contents
  local staging="$BUILD_DIR/dashcaddy"
  mkdir -p "$staging/dashcaddy-api/routes" "$staging/status" "$staging/scripts"

  # API files (optional files are allowed to be missing — hence `|| true`)
  cp -f dashcaddy-api/*.js "$staging/dashcaddy-api/" 2>/dev/null || true
  cp -rf dashcaddy-api/routes/* "$staging/dashcaddy-api/routes/" 2>/dev/null || true
  cp -f dashcaddy-api/package.json "$staging/dashcaddy-api/"
  cp -f dashcaddy-api/package-lock.json "$staging/dashcaddy-api/" 2>/dev/null || true
  cp -f dashcaddy-api/Dockerfile "$staging/dashcaddy-api/"
  cp -f dashcaddy-api/openapi.yaml "$staging/dashcaddy-api/" 2>/dev/null || true

  # Dashboard files
  cp -f status/index.html "$staging/status/"
  cp -f status/sw.js "$staging/status/" 2>/dev/null || true
  local dir
  for dir in css js dist vendor assets; do
    if [ -d "status/${dir}" ]; then
      cp -rf "status/${dir}" "$staging/status/"
    fi
  done

  # Updater scripts
  cp -f dashcaddy-api/scripts/dashcaddy-update.sh "$staging/scripts/" 2>/dev/null || true
  cp -f dashcaddy-api/scripts/dashcaddy-updater.path "$staging/scripts/" 2>/dev/null || true
  cp -f dashcaddy-api/scripts/dashcaddy-updater.service "$staging/scripts/" 2>/dev/null || true

  # 5. Create tarball
  local tarball="dashcaddy-${version}.tar.gz"
  cd "$BUILD_DIR"
  tar czf "$tarball" dashcaddy/
  log "Tarball: ${tarball} ($(du -h "$tarball" | cut -f1))"

  # 6. Compute SHA-256 (clients verify this before installing)
  local sha256
  sha256=$(sha256sum "$tarball" | cut -d' ' -f1)
  log "SHA-256: ${sha256}"

  # 7. Write version.json — the manifest the auto-updater polls
  cat > version.json <<EOF
{
  "version": "${version}",
  "commit": "${commit}",
  "date": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
  "sha256": "${sha256}",
  "changelog": ${changelog_json},
  "breaking": false,
  "tarball": "${tarball}"
}
EOF

  # 8. Deploy to web root
  mkdir -p "$RELEASE_DIR"
  cp -f "$tarball" "$RELEASE_DIR/"
  cp -f version.json "$RELEASE_DIR/"
  # Also keep a "latest" symlink/copy
  cp -f "$tarball" "$RELEASE_DIR/latest.tar.gz"
  log "Deployed to ${RELEASE_DIR}"

  # 9. Sync to mirror (Contabo DE) — best effort, never fails the build.
  # ("Mirror synced" is now only logged when rsync actually succeeded.)
  if ssh -o ConnectTimeout=5 "$MIRROR_HOST" true 2>/dev/null; then
    log "Syncing to mirror..."
    if rsync -az --timeout=30 "$RELEASE_DIR/" "$MIRROR_HOST:/var/www/get2.dashcaddy.net/release/" 2>&1; then
      log "Mirror synced"
    else
      log "WARNING: Mirror sync failed (non-fatal)"
    fi
  else
    log "WARNING: Mirror host unreachable, skipping sync"
  fi

  log "=== Release build complete: v${version} (${commit}) ==="
}
|
||||
|
||||
main "$@"
|
||||
@@ -1,219 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# DashCaddy Host-Side Updater
|
||||
# Triggered by systemd path unit when the API container writes trigger.json.
|
||||
# Handles API container rebuild + restart with automatic rollback on failure.
|
||||
|
||||
set -euo pipefail

# Trigger/result files are the IPC channel with the API container:
# the container writes trigger.json, we reply via result.json.
readonly UPDATES_DIR="/opt/dashcaddy/updates"
readonly TRIGGER_FILE="${UPDATES_DIR}/trigger.json"
readonly RESULT_FILE="${UPDATES_DIR}/result.json"
# Rollback snapshots live here, pruned down to MAX_BACKUPS (see cleanup_old_backups).
readonly BACKUP_BASE="${UPDATES_DIR}/backups"
# Post-restart probe target and total probing budget in seconds
# (wait_for_health polls every 2s, so HEALTH_TIMEOUT/2 attempts).
readonly HEALTH_URL="http://localhost:3001/health"
readonly HEALTH_TIMEOUT=60
readonly MAX_BACKUPS=3
|
||||
|
||||
# Timestamped logger; the tag matches the systemd SyslogIdentifier.
log() {
  printf '[dashcaddy-update] %s %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$*"
}
|
||||
|
||||
#######################################
# Write the outcome manifest the API container polls.
# Globals:   RESULT_FILE (written)
# Arguments: $1 success  - JSON literal, "true" or "false" (emitted raw)
#            $2 message  - human-readable status (arbitrary text)
#            $3 version  - target version string
#            $4 duration - integer milliseconds (emitted raw)
# Fix: message/version are now JSON-encoded via python3 — the old heredoc
# interpolated them verbatim, so any double quote produced invalid JSON.
#######################################
write_result() {
  local success="$1" message="$2" version="$3" duration="$4"

  local msg_json ver_json
  msg_json=$(MSG="$message" python3 -c 'import json, os; print(json.dumps(os.environ["MSG"]))')
  ver_json=$(VER="$version" python3 -c 'import json, os; print(json.dumps(os.environ["VER"]))')

  cat > "$RESULT_FILE" <<EOF
{
  "success": ${success},
  "version": ${ver_json},
  "message": ${msg_json},
  "duration": ${duration},
  "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
}
EOF
  log "Result written: success=${success} version=${version}"
}
|
||||
|
||||
#######################################
# Snapshot the currently deployed API files so a failed update can be
# reverted. Missing optional files are skipped silently.
# Globals:   BACKUP_BASE (read)
# Arguments: $1 - API source directory, $2 - version label for the snapshot
#######################################
backup_current() {
  local api_dir="$1" version="$2"
  local dest="${BACKUP_BASE}/${version}"
  mkdir -p "$dest/routes"

  cp -f "${api_dir}"/*.js "$dest/" 2>/dev/null || true
  cp -rf "${api_dir}/routes/"* "$dest/routes/" 2>/dev/null || true

  local name
  for name in package.json package-lock.json Dockerfile openapi.yaml; do
    cp -f "${api_dir}/${name}" "$dest/" 2>/dev/null || true
  done

  log "Backed up version ${version} to ${dest}"
}
|
||||
|
||||
#######################################
# Copy a saved snapshot back over the API source directory.
# Globals:   BACKUP_BASE (read)
# Arguments: $1 - API source directory, $2 - version label to restore
# Returns:   1 when no snapshot exists for that version, 0 otherwise
#######################################
restore_backup() {
  local api_dir="$1" version="$2"
  local src="${BACKUP_BASE}/${version}"

  if [ ! -d "$src" ]; then
    log "ERROR: No backup found for version ${version}"
    return 1
  fi

  cp -f "${src}"/*.js "$api_dir/" 2>/dev/null || true
  cp -rf "${src}/routes/"* "${api_dir}/routes/" 2>/dev/null || true

  local name
  for name in package.json package-lock.json Dockerfile openapi.yaml; do
    cp -f "${src}/${name}" "$api_dir/" 2>/dev/null || true
  done

  log "Restored version ${version} from ${src}"
}
|
||||
|
||||
#######################################
# Overlay the staged release files onto the live API source directory.
# Optional files that the release does not ship are skipped silently.
# Arguments: $1 - staging directory, $2 - API source directory
#######################################
copy_new_files() {
  local src="$1" dest="$2"

  cp -f "${src}"/*.js "$dest/" 2>/dev/null || true
  if [ -d "${src}/routes" ]; then
    cp -rf "${src}/routes/"* "${dest}/routes/" 2>/dev/null || true
  fi

  local name
  for name in package.json package-lock.json Dockerfile openapi.yaml; do
    cp -f "${src}/${name}" "$dest/" 2>/dev/null || true
  done

  log "Copied new files from ${src} to ${dest}"
}
|
||||
|
||||
#######################################
# Poll the API health endpoint every 2s until it answers or the budget
# runs out (HEALTH_TIMEOUT seconds → HEALTH_TIMEOUT/2 attempts).
# Globals: HEALTH_URL, HEALTH_TIMEOUT (read)
# Returns: 0 once the endpoint responds, 1 on timeout
#######################################
wait_for_health() {
  local tries=$((HEALTH_TIMEOUT / 2))
  local i
  for (( i = 0; i < tries; i++ )); do
    if curl -fsS --max-time 3 "$HEALTH_URL" >/dev/null 2>&1; then
      log "Health check passed (attempt $((i+1)))"
      return 0
    fi
    sleep 2
  done

  log "Health check FAILED after ${HEALTH_TIMEOUT}s"
  return 1
}
|
||||
|
||||
#######################################
# Locate the docker-compose.yml for dashcaddy-api: probe the known site
# directories, falling back to the API source dir passed in $1.
# Outputs: the chosen directory on stdout
#######################################
find_compose_dir() {
  local candidate
  for candidate in /etc/dashcaddy/sites/dashcaddy-api /etc/dashcaddy/sites/caddy-api; do
    if [ -f "${candidate}/docker-compose.yml" ] || [ -f "${candidate}/docker-compose.yaml" ]; then
      echo "$candidate"
      return 0
    fi
  done
  # Fallback: same as api source
  echo "$1"
}
|
||||
|
||||
#######################################
# Keep only the newest MAX_BACKUPS snapshot directories. "Newest" is
# approximated by lexical (version-name) order — the same order the old
# `ls` produced — so the lexically-smallest directories are removed.
# Fix: uses a glob array instead of parsing `ls` output, which broke on
# directory names containing whitespace.
# Globals: BACKUP_BASE, MAX_BACKUPS (read)
#######################################
cleanup_old_backups() {
  [ -d "$BACKUP_BASE" ] || return 0

  # Trailing slash limits the glob to directories; glob results are sorted.
  local dirs=("${BACKUP_BASE}"/*/)
  # With nullglob off, an unmatched glob leaves the literal pattern behind.
  [ -e "${dirs[0]}" ] || return 0

  local count=${#dirs[@]}
  if (( count > MAX_BACKUPS )); then
    local excess=$((count - MAX_BACKUPS))
    local i
    for (( i = 0; i < excess; i++ )); do
      rm -rf -- "${dirs[i]}"
      log "Cleaned old backup: ${dirs[i]}"
    done
  fi
}
|
||||
|
||||
#######################################
# Orchestrate one update cycle: parse trigger.json → backup → copy files →
# rebuild image → restart container → health-check, rolling back to the
# previous version on any failure. Always consumes the trigger file and
# reports the outcome through write_result().
# Globals: TRIGGER_FILE, UPDATES_DIR, HEALTH_TIMEOUT (read)
#######################################
main() {
  if [ ! -f "$TRIGGER_FILE" ]; then
    # The path unit can fire on deletions/partial writes; nothing to do then.
    log "No trigger file found, exiting"
    exit 0
  fi

  local start_time
  start_time=$(date +%s)

  # Parse trigger file.
  # NOTE(review): six separate python3 invocations could be collapsed into
  # one, but each carries a safe default so a malformed trigger never aborts
  # the updater here.
  local action version from_version staging_dir api_dir commit
  action=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('action','update'))" 2>/dev/null || echo "update")
  version=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('version','unknown'))" 2>/dev/null || echo "unknown")
  from_version=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('fromVersion','unknown'))" 2>/dev/null || echo "unknown")
  staging_dir=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('stagingDir',''))" 2>/dev/null || echo "")
  api_dir=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('apiSourceDir','/opt/dashcaddy'))" 2>/dev/null || echo "/opt/dashcaddy")
  commit=$(python3 -c "import json,sys; d=json.load(open('$TRIGGER_FILE')); print(d.get('commit','unknown'))" 2>/dev/null || echo "unknown")

  log "=== DashCaddy ${action} started: ${from_version} → ${version} (${commit}) ==="

  # The staged files are produced by the API container; without them there
  # is nothing to install.
  if [ -z "$staging_dir" ] || [ ! -d "$staging_dir" ]; then
    log "ERROR: Staging directory not found: ${staging_dir}"
    write_result "false" "Staging directory not found" "$version" "0"
    rm -f "$TRIGGER_FILE"
    exit 1
  fi

  local compose_dir
  compose_dir=$(find_compose_dir "$api_dir")

  # Step 1: Backup current version
  log "Step 1: Backing up current version (${from_version})"
  backup_current "$api_dir" "$from_version"

  # Step 2: Copy new files
  log "Step 2: Copying new files"
  copy_new_files "$staging_dir" "$api_dir"

  # Write commit hash to VERSION file
  echo "$commit" > "${api_dir}/VERSION"

  # Step 3: Rebuild container
  log "Step 3: Building new container image"
  cd "$compose_dir"
  if ! docker compose build --build-arg DASHCADDY_COMMIT="$commit" --quiet 2>&1; then
    log "ERROR: docker compose build failed, rolling back"
    restore_backup "$api_dir" "$from_version"
    # Durations are reported in milliseconds.
    local elapsed=$(( $(date +%s) - start_time ))
    write_result "false" "Build failed, rolled back to ${from_version}" "$version" "$((elapsed * 1000))"
    rm -f "$TRIGGER_FILE"
    exit 1
  fi

  # Step 4: Restart container
  log "Step 4: Restarting container"
  if ! docker compose up -d 2>&1; then
    log "ERROR: docker compose up failed, rolling back"
    restore_backup "$api_dir" "$from_version"
    # Rebuild from the restored sources before bringing the stack back up;
    # both calls are best-effort since we are already on the failure path.
    docker compose build --quiet 2>&1 || true
    docker compose up -d 2>&1 || true
    local elapsed=$(( $(date +%s) - start_time ))
    write_result "false" "Container start failed, rolled back to ${from_version}" "$version" "$((elapsed * 1000))"
    rm -f "$TRIGGER_FILE"
    exit 1
  fi

  # Step 5: Health check
  log "Step 5: Waiting for health check (${HEALTH_TIMEOUT}s timeout)"
  if wait_for_health; then
    local elapsed=$(( $(date +%s) - start_time ))
    log "=== Update to ${version} SUCCESSFUL (${elapsed}s) ==="
    write_result "true" "Update successful" "$version" "$((elapsed * 1000))"
  else
    log "Health check failed — ROLLING BACK to ${from_version}"
    restore_backup "$api_dir" "$from_version"
    cd "$compose_dir"
    docker compose build --quiet 2>&1 || true
    docker compose up -d 2>&1 || true

    # Verify the rollback actually came back up before reporting success
    # of the rollback itself.
    if wait_for_health; then
      local elapsed=$(( $(date +%s) - start_time ))
      log "Rollback to ${from_version} succeeded"
      write_result "false" "Health check failed after update. Rolled back to ${from_version}." "$version" "$((elapsed * 1000))"
    else
      local elapsed=$(( $(date +%s) - start_time ))
      log "CRITICAL: Rollback also failed. Manual intervention required."
      write_result "false" "CRITICAL: Both update and rollback failed. Manual intervention required." "$version" "$((elapsed * 1000))"
    fi
  fi

  # Cleanup: consume the trigger, drop the staging tree, prune old snapshots.
  rm -f "$TRIGGER_FILE"
  rm -rf "${UPDATES_DIR}/staging"
  cleanup_old_backups

  log "Update process complete"
}
|
||||
|
||||
main "$@"
|
||||
@@ -1,9 +0,0 @@
|
||||
# systemd path unit: watches the update trigger file written by the API
# container and activates the matching dashcaddy-updater.service (same base
# name) whenever it changes.
[Unit]
Description=Watch for DashCaddy update trigger
Documentation=https://dashcaddy.net

[Path]
# PathChanged fires when the watched file is closed after being written,
# so a fully-written trigger.json starts one update run.
PathChanged=/opt/dashcaddy/updates/trigger.json

[Install]
WantedBy=multi-user.target
|
||||
@@ -1,13 +0,0 @@
|
||||
# systemd service unit: runs the host-side updater once per activation by
# the companion dashcaddy-updater.path unit.
[Unit]
Description=DashCaddy auto-update handler
Documentation=https://dashcaddy.net
# The updater drives `docker compose`, so Docker must be up first.
After=docker.service
Requires=docker.service

[Service]
# oneshot: each trigger runs the script to completion, then the unit stops.
Type=oneshot
ExecStart=/opt/dashcaddy/scripts/dashcaddy-update.sh
# Generous ceiling: build + restart + health check + possible rollback.
TimeoutStartSec=300
StandardOutput=journal
StandardError=journal
SyslogIdentifier=dashcaddy-update
|
||||
@@ -1,120 +0,0 @@
|
||||
#Requires -RunAsAdministrator

<#
.SYNOPSIS
Installs the DashCaddy Root CA certificate to the Trusted Root Certification Authorities store.

.DESCRIPTION
This script downloads the root CA certificate from your DashCaddy instance, verifies its fingerprint,
and installs it to the local machine's trusted root store.
#>

$ErrorActionPreference = "Stop"

# ==========================================
# CONFIGURATION (Injected by DashCaddy API)
# ==========================================
$CertUrl = "{{CERT_URL}}"
$ExpectedFingerprint = "{{CERT_FINGERPRINT}}"
# ==========================================

# Scratch location for the downloaded certificate; removed on every exit path below.
$TempFile = "$env:TEMP\dashcaddy-root-ca.crt"

# Colors
$Red = [System.ConsoleColor]::Red
$Green = [System.ConsoleColor]::Green
$Cyan = [System.ConsoleColor]::Cyan
$Yellow = [System.ConsoleColor]::Yellow

Write-Host ""
Write-Host "========================================" -ForegroundColor $Cyan
Write-Host " DashCaddy Certificate Installer" -ForegroundColor $Cyan
Write-Host "========================================" -ForegroundColor $Cyan
Write-Host ""

# Step 1: Download certificate
Write-Host "[1/4] Downloading certificate..." -ForegroundColor $Cyan
try {
    $ProgressPreference = 'SilentlyContinue'

    # Bypass SSL validation — the user doesn't trust the CA yet, that's the whole point.
    # Authenticity is established by the fingerprint comparison in Step 2 instead.
    if (-not ([System.Management.Automation.PSTypeName]'TrustAllCertsPolicy').Type) {
        Add-Type @"
using System.Net;
using System.Security.Cryptography.X509Certificates;
public class TrustAllCertsPolicy : ICertificatePolicy {
    public bool CheckValidationResult(ServicePoint sp, X509Certificate cert, WebRequest req, int problem) { return true; }
}
"@
    }
    [System.Net.ServicePointManager]::CertificatePolicy = New-Object TrustAllCertsPolicy

    Invoke-WebRequest -Uri $CertUrl -OutFile $TempFile -UseBasicParsing -ErrorAction Stop
    Write-Host " OK Certificate downloaded" -ForegroundColor $Green
} catch {
    Write-Host " FAIL Failed to download certificate from $CertUrl" -ForegroundColor $Red
    Write-Host " Error: $_" -ForegroundColor $Red
    exit 1
}

# Step 2: Verify fingerprint
# Compares the certificate thumbprint against the expected value, ignoring
# colons and whitespace so either formatting of the fingerprint is accepted.
Write-Host "[2/4] Verifying certificate fingerprint..." -ForegroundColor $Cyan
try {
    $Cert = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2($TempFile)
    $Fingerprint = $Cert.Thumbprint

    $NormalizedExpected = $ExpectedFingerprint -replace '[:\s]', ''
    $NormalizedActual = $Fingerprint -replace '[:\s]', ''

    if ($NormalizedActual -ne $NormalizedExpected) {
        Write-Host " FAIL Fingerprint mismatch!" -ForegroundColor $Red
        Write-Host " Expected: $ExpectedFingerprint" -ForegroundColor $Yellow
        Write-Host " Got: $Fingerprint" -ForegroundColor $Red
        Remove-Item $TempFile -Force
        Write-Host ""
        Write-Host "SECURITY WARNING: The downloaded certificate does not match the expected fingerprint." -ForegroundColor $Red
        exit 1
    }
    Write-Host " OK Fingerprint verified" -ForegroundColor $Green
} catch {
    Write-Host " FAIL Failed to verify fingerprint: $_" -ForegroundColor $Red
    Remove-Item $TempFile -Force -ErrorAction SilentlyContinue
    exit 1
}

# Step 3: Check if already installed — makes re-runs idempotent.
Write-Host "[3/4] Checking for existing certificate..." -ForegroundColor $Cyan
$ExistingCert = Get-ChildItem -Path Cert:\LocalMachine\Root | Where-Object { $_.Thumbprint -eq $Fingerprint }
if ($ExistingCert) {
    Write-Host " INFO Certificate already installed" -ForegroundColor $Yellow
    Remove-Item $TempFile -Force
    Write-Host ""
    Write-Host "The DashCaddy Root CA is already trusted on this system." -ForegroundColor $Green
    Start-Sleep -Seconds 3
    exit 0
}
Write-Host " OK Not yet installed, proceeding..." -ForegroundColor $Green

# Step 4: Install certificate
Write-Host "[4/4] Installing to Trusted Root store..." -ForegroundColor $Cyan
try {
    # NOTE(review): $ImportedCert is assigned but never read — presumably kept
    # to suppress pipeline output; confirm before removing.
    $ImportedCert = Import-Certificate -FilePath $TempFile -CertStoreLocation Cert:\LocalMachine\Root -ErrorAction Stop
    Write-Host " OK Certificate installed successfully" -ForegroundColor $Green
} catch {
    Write-Host " FAIL Failed to install certificate. Ensure you are running as Administrator." -ForegroundColor $Red
    Remove-Item $TempFile -Force -ErrorAction SilentlyContinue
    exit 1
}

# Cleanup
Remove-Item $TempFile -Force -ErrorAction SilentlyContinue

Write-Host ""
Write-Host "========================================" -ForegroundColor $Green
Write-Host " SUCCESS!" -ForegroundColor $Green
Write-Host "========================================" -ForegroundColor $Green
Write-Host ""
Write-Host "Your browser will now trust DashCaddy apps." -ForegroundColor $Green
Write-Host "You may need to restart your browser for changes to take effect." -ForegroundColor $Yellow
Write-Host ""
Start-Sleep -Seconds 3
|
||||
@@ -1,99 +0,0 @@
|
||||
#!/usr/bin/env bash
# DashCaddy Root CA installer (Linux/macOS): downloads the CA cert, verifies
# its SHA-256 fingerprint, and installs it into the system trust store.
set -euo pipefail

# ==========================================
# CONFIGURATION (Injected by DashCaddy API)
# ==========================================
CERT_URL="{{CERT_URL}}"
EXPECTED_FP="{{CERT_FINGERPRINT}}"
# ==========================================

TMPFILE="$(mktemp /tmp/dashcaddy-root-ca.XXXXXX.crt)"
# Always remove the downloaded cert, even when `set -e` aborts the script
# before the explicit cleanup (e.g. download failure) — previously the temp
# file leaked on those early exits.
trap 'rm -f -- "$TMPFILE"' EXIT

RED='\033[0;31m'; GREEN='\033[0;32m'; CYAN='\033[0;36m'; YELLOW='\033[1;33m'; NC='\033[0m'
|
||||
|
||||
echo ""
echo -e "${CYAN}========================================"
echo " DashCaddy Certificate Installer"
echo -e "========================================${NC}"
echo ""

# Step 1: Download certificate (skip TLS verification — we verify the fingerprint instead)
echo -e "${CYAN}[1/4] Downloading certificate...${NC}"
if command -v curl &>/dev/null; then
  curl -fsSk -o "$TMPFILE" "$CERT_URL"
elif command -v wget &>/dev/null; then
  wget -q --no-check-certificate -O "$TMPFILE" "$CERT_URL"
else
  echo -e "${RED} FAIL Neither curl nor wget found${NC}"
  exit 1
fi
echo -e "${GREEN} OK Certificate downloaded${NC}"

# Step 2: Verify fingerprint.
# SHA-256 fingerprint with the "SHA256 Fingerprint=" prefix and colons
# stripped, compared against the expected value with colons/spaces removed —
# so both colon-separated and bare-hex inputs match.
echo -e "${CYAN}[2/4] Verifying certificate fingerprint...${NC}"
ACTUAL_FP=$(openssl x509 -in "$TMPFILE" -noout -fingerprint -sha256 2>/dev/null | sed 's/.*=//; s/://g')
CLEAN_EXPECTED=$(echo "$EXPECTED_FP" | tr -d ': ')

if [ "$ACTUAL_FP" != "$CLEAN_EXPECTED" ]; then
  echo -e "${RED} FAIL Fingerprint mismatch!${NC}"
  echo -e "${YELLOW} Expected: $EXPECTED_FP${NC}"
  echo -e "${RED} Got: $ACTUAL_FP${NC}"
  rm -f "$TMPFILE"
  echo -e "${RED}SECURITY WARNING: Certificate does not match expected fingerprint.${NC}"
  exit 1
fi
echo -e "${GREEN} OK Fingerprint verified${NC}"

# Step 3: Detect OS and install
echo -e "${CYAN}[3/4] Installing certificate...${NC}"

# Debian/Ubuntu: drop into the local CA dir, regenerate the bundle.
install_debian() {
  sudo cp "$TMPFILE" /usr/local/share/ca-certificates/dashcaddy-root-ca.crt
  sudo update-ca-certificates
}

# RHEL/Fedora/CentOS: anchors dir + update-ca-trust.
install_redhat() {
  sudo cp "$TMPFILE" /etc/pki/ca-trust/source/anchors/dashcaddy-root-ca.crt
  sudo update-ca-trust extract
}

# Arch Linux: p11-kit trust sources.
install_arch() {
  sudo cp "$TMPFILE" /etc/ca-certificates/trust-source/anchors/dashcaddy-root-ca.crt
  sudo trust extract-compat
}

# macOS: add to the System keychain as a trusted root.
install_macos() {
  sudo security add-trusted-cert -d -r trustRoot -k /Library/Keychains/System.keychain "$TMPFILE"
}

# Distro detection: release files first, then fall back to whichever
# trust-store tool happens to be on PATH.
if [ "$(uname)" = "Darwin" ]; then
  install_macos
elif [ -f /etc/debian_version ]; then
  install_debian
elif [ -f /etc/redhat-release ]; then
  install_redhat
elif [ -f /etc/arch-release ]; then
  install_arch
elif command -v update-ca-certificates &>/dev/null; then
  install_debian
elif command -v update-ca-trust &>/dev/null; then
  install_redhat
else
  echo -e "${RED} FAIL Could not detect package manager. Install manually:${NC}"
  echo " Copy $TMPFILE to your system's CA trust store"
  exit 1
fi
echo -e "${GREEN} OK Certificate installed${NC}"

# Step 4: Cleanup
rm -f "$TMPFILE"

echo ""
echo -e "${GREEN}========================================"
echo " SUCCESS!"
echo -e "========================================${NC}"
echo ""
echo -e "${GREEN}Your system now trusts the DashCaddy Root CA.${NC}"
echo -e "${YELLOW}Restart your browser for changes to take effect.${NC}"
echo ""
|
||||
@@ -1,136 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* DashCaddy Release Webhook Handler
|
||||
* Receives push webhooks from Gitea, verifies HMAC signature,
|
||||
* and triggers build-release.sh.
|
||||
*
|
||||
* Usage: node webhook-handler.js
|
||||
* Env vars:
|
||||
* WEBHOOK_SECRET — Gitea webhook secret (required)
|
||||
* WEBHOOK_PORT — Listen port (default: 9090)
|
||||
* BUILD_SCRIPT — Path to build script (default: /opt/dashcaddy-release/build-release.sh)
|
||||
*/
|
||||
|
||||
const http = require('http');
|
||||
const crypto = require('crypto');
|
||||
const { spawn } = require('child_process');
|
||||
const fs = require('fs');
|
||||
|
||||
// Listening port; override with WEBHOOK_PORT.
const PORT = parseInt(process.env.WEBHOOK_PORT || '9090', 10);
// Shared HMAC secret configured on the Gitea webhook — mandatory.
const SECRET = process.env.WEBHOOK_SECRET;
// Script spawned for each accepted push (see triggerBuild).
const BUILD_SCRIPT = process.env.BUILD_SCRIPT || '/opt/dashcaddy-release/build-release.sh';
const LOG_FILE = '/var/log/dashcaddy-release.log';

// Fail fast: without the secret no request could ever be verified.
if (!SECRET) {
  console.error('WEBHOOK_SECRET environment variable is required');
  process.exit(1);
}

// Single-flight guard: at most one build runs at a time (see triggerBuild).
let buildRunning = false;
|
||||
|
||||
function log(msg) {
|
||||
const line = `[webhook] ${new Date().toISOString()} ${msg}`;
|
||||
console.log(line);
|
||||
fs.appendFileSync(LOG_FILE, `${line }\n`);
|
||||
}
|
||||
|
||||
/**
 * Constant-time check of the Gitea HMAC-SHA256 signature header.
 * crypto.timingSafeEqual throws RangeError when the buffers differ in
 * length, so a request with a short/long signature previously crashed the
 * process; the length is now compared first (length is not secret).
 * @param {string} body      - raw request body
 * @param {string} signature - hex digest from the X-Gitea-Signature header
 * @returns {boolean} true when the signature matches
 */
function verifySignature(body, signature) {
  if (!signature) return false;
  const expected = crypto.createHmac('sha256', SECRET).update(body).digest('hex');
  const given = Buffer.from(signature);
  const wanted = Buffer.from(expected);
  if (given.length !== wanted.length) return false;
  return crypto.timingSafeEqual(given, wanted);
}
|
||||
|
||||
/**
 * Spawn the release build script in the background, streaming its
 * stdout/stderr into the shared log. The module-level `buildRunning` flag
 * guarantees only one build runs at a time; overlapping pushes are dropped
 * (not queued) and rely on the next push to rebuild.
 */
function triggerBuild() {
  if (buildRunning) {
    log('Build already in progress, skipping');
    return;
  }
  buildRunning = true;
  log('Triggering build...');

  const child = spawn('bash', [BUILD_SCRIPT], {
    stdio: ['ignore', 'pipe', 'pipe'],
    env: { ...process.env, PATH: process.env.PATH },
  });

  // Relay build output line-by-line so log entries keep their prefix.
  child.stdout.on('data', (data) => {
    const lines = data.toString().trim().split('\n');
    lines.forEach(line => log(`[build] ${line}`));
  });

  child.stderr.on('data', (data) => {
    const lines = data.toString().trim().split('\n');
    lines.forEach(line => log(`[build:err] ${line}`));
  });

  // 'close' always fires (success or failure), releasing the single-flight flag.
  child.on('close', (code) => {
    buildRunning = false;
    if (code === 0) {
      log('Build completed successfully');
    } else {
      log(`Build FAILED with exit code ${code}`);
    }
  });
}
|
||||
|
||||
// HTTP server: GET /health for monitoring, POST /webhook for Gitea pushes.
const server = http.createServer((req, res) => {
  // Health check
  if (req.method === 'GET' && req.url === '/health') {
    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ status: 'ok', buildRunning }));
    return;
  }

  // Only accept POST to /webhook
  if (req.method !== 'POST' || req.url !== '/webhook') {
    res.writeHead(404);
    res.end('Not found');
    return;
  }

  // Buffer the whole body first — the HMAC must be computed over the raw bytes.
  let body = '';
  req.on('data', chunk => { body += chunk; });
  req.on('end', () => {
    // Verify Gitea HMAC signature before trusting anything in the payload.
    const sig = req.headers['x-gitea-signature'] || '';
    if (!verifySignature(body, sig)) {
      log('Signature verification FAILED');
      res.writeHead(403);
      res.end('Invalid signature');
      return;
    }

    try {
      const payload = JSON.parse(body);
      const ref = payload.ref || '';
      const branch = ref.replace('refs/heads/', '');

      // Releases are only built from main.
      if (branch !== 'main') {
        log(`Ignoring push to ${branch} (not main)`);
        res.writeHead(200);
        res.end('Ignored (not main branch)');
        return;
      }

      const pusher = payload.pusher?.login || 'unknown';
      const commits = payload.commits?.length || 0;
      log(`Push to main by ${pusher}: ${commits} commit(s)`);

      // Fire-and-forget: respond immediately, the build runs in background.
      triggerBuild();

      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ accepted: true }));
    } catch (e) {
      log(`Failed to parse webhook payload: ${ e.message}`);
      res.writeHead(400);
      res.end('Invalid payload');
    }
  });
});
|
||||
|
||||
// Bind on all interfaces: the webhook arrives from the Gitea host over the
// network. The log line previously claimed 127.0.0.1 while actually binding
// 0.0.0.0 — the message now matches the real bind address.
server.listen(PORT, '0.0.0.0', () => {
  log(`Webhook handler listening on 0.0.0.0:${PORT}`);
});
|
||||
Reference in New Issue
Block a user