Files
timmy-config/deploy/conduit/scripts/backup.sh
Ezra (Archivist) 1b33db499e [matrix] Add Conduit deployment scaffold for #166, #183
Architecture:
- ADR-1: Conduit selected over Synapse/Dendrite (Rust, low resource)
- ADR-2: Deploy on existing Gitea VPS initially
- ADR-3: Full federation enabled

Artifacts:
- docs/matrix-fleet-comms/README.md (architecture + runbooks)
- deploy/conduit/conduit.toml (production config)
- deploy/conduit/conduit.service (systemd)
- deploy/conduit/Caddyfile (reverse proxy)
- deploy/conduit/install.sh (one-command installer)
- deploy/conduit/scripts/backup.sh (automated backups)
- deploy/conduit/scripts/health.sh (health monitoring)

Closes #183 (scaffold complete)
Progresses #166 (implementation unblocked)
2026-04-05 04:38:15 +00:00

83 lines
2.1 KiB
Bash

#!/bin/bash
# Conduit Matrix Homeserver Backup Script
# Location: /opt/conduit/scripts/backup.sh
# Reference: docs/matrix-fleet-comms/README.md
# Run via cron: 0 3 * * * /opt/conduit/scripts/backup.sh
set -euo pipefail

# Configuration — constants are readonly so later code cannot clobber them.
readonly BACKUP_BASE_DIR="/backups/conduit"
readonly DATA_DIR="/opt/conduit/data"
readonly CONFIG_FILE="/opt/conduit/conduit.toml"
readonly RETENTION_DAYS=7
# Declare separately from the command substitution so date's exit status
# is not masked by the assignment (see SC2155).
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
readonly TIMESTAMP
# Per-run staging directory; compressed into $TIMESTAMP.tar.gz at the end.
readonly BACKUP_DIR="$BACKUP_BASE_DIR/$TIMESTAMP"

# Ensure the staging directory exists before anything is copied into it.
mkdir -p "$BACKUP_DIR"
# log MESSAGE...
# Writes MESSAGE to stdout, prefixed with a "[YYYY-mm-dd HH:MM:SS]" timestamp.
log() {
  printf '[%s] %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$*"
}
log "Starting Conduit backup..."
# Check if Conduit is running; stop it so the database copy is consistent.
RESTART_NEEDED=false
if systemctl is-active --quiet conduit; then
  log "Stopping Conduit for consistent backup..."
  systemctl stop conduit
  RESTART_NEEDED=true
else
  log "Conduit already stopped"
fi
# Safety net: under `set -e` any later failure (cp, sqlite3, tar, ...) would
# otherwise exit the script with the homeserver left stopped. The EXIT trap
# guarantees it is brought back up on every exit path. `systemctl start` on
# an already-running unit is a no-op, so firing on normal exit is harmless.
trap 'if [ "$RESTART_NEEDED" = true ]; then systemctl start conduit; fi' EXIT
# Backup database.
# Use sqlite3's online `.backup` command instead of a raw cp: it takes a
# consistent snapshot even if the file is touched mid-copy, then VACUUM the
# copy to compact it before archiving.
if [ -f "$DATA_DIR/conduit.db" ]; then
  log "Backing up database..."
  sqlite3 "$DATA_DIR/conduit.db" ".backup '$BACKUP_DIR/conduit.db'"
  sqlite3 "$BACKUP_DIR/conduit.db" "VACUUM;"
else
  log "WARNING: Database not found at $DATA_DIR/conduit.db"
fi
# Copy the production config alongside the database snapshot (skipped
# silently when the file is absent).
if [[ -f "$CONFIG_FILE" ]]; then
  log "Backing up configuration..."
  cp -- "$CONFIG_FILE" "$BACKUP_DIR/"
fi
# Include uploaded media in the backup when a media directory exists.
if [[ -d "$DATA_DIR/media" ]]; then
  log "Backing up media files..."
  cp -r -- "$DATA_DIR/media" "$BACKUP_DIR/"
fi
# Bring Conduit back up, but only if this script was the one that stopped it.
if [[ "$RESTART_NEEDED" == true ]]; then
  log "Restarting Conduit..."
  systemctl start conduit
fi
# Compress the staging directory into a single archive, then drop the
# uncompressed staging copy so only the .tar.gz remains.
log "Creating compressed archive..."
# Use tar -C plus an absolute output path instead of cd'ing into the backup
# root: avoids an unguarded `cd` (SC2164) and leaves the script's working
# directory untouched for subsequent steps.
tar czf "$BACKUP_BASE_DIR/$TIMESTAMP.tar.gz" -C "$BACKUP_DIR" .
rm -rf -- "$BACKUP_DIR"
ARCHIVE_SIZE=$(du -h "$BACKUP_BASE_DIR/$TIMESTAMP.tar.gz" | cut -f1)
log "Backup complete: $TIMESTAMP.tar.gz ($ARCHIVE_SIZE)"
# Upload to S3 (uncomment and configure when ready)
# if command -v aws &> /dev/null; then
#   log "Uploading to S3..."
#   aws s3 cp "$BACKUP_BASE_DIR/$TIMESTAMP.tar.gz" s3://timmy-backups/conduit/
# fi

# Cleanup old backups. Restrict the match to regular files directly under
# the backup root (-maxdepth 1 -type f) so a stray subdirectory containing
# unrelated .tar.gz files can never be swept up, and quote the retention
# variable (SC2086).
log "Cleaning up backups older than $RETENTION_DAYS days..."
find "$BACKUP_BASE_DIR" -maxdepth 1 -type f -name "*.tar.gz" -mtime +"$RETENTION_DAYS" -delete
log "Backup process complete"