#!/usr/bin/env bash
# backup_pipeline.sh — Nightly encrypted Hermes backup pipeline
# Refs: timmy-home #693, timmy-home #561
#
# Creates a tar.gz of BACKUP_SOURCE_DIR, encrypts it with OpenSSL
# (aes-256-cbc + PBKDF2), writes a JSON manifest, stores both locally under
# BACKUP_ROOT/<datestamp>, and mirrors them to a NAS path and/or an S3 URI.
# Requires BACKUP_PASSPHRASE_FILE or BACKUP_PASSPHRASE, and at least one of
# BACKUP_NAS_TARGET / BACKUP_S3_URI.
set -euo pipefail

# Timestamp used in directory and archive names; BACKUP_TIMESTAMP lets a
# caller (or a test) pin it to a fixed value.
DATESTAMP="${BACKUP_TIMESTAMP:-$(date +%Y%m%d-%H%M%S)}"

# Source tree to back up, and the local destination layout.
BACKUP_SOURCE_DIR="${BACKUP_SOURCE_DIR:-${HOME}/.hermes}"
BACKUP_ROOT="${BACKUP_ROOT:-${HOME}/.timmy-backups/hermes}"
BACKUP_LOG_DIR="${BACKUP_LOG_DIR:-${BACKUP_ROOT}/logs}"
BACKUP_RETENTION_DAYS="${BACKUP_RETENTION_DAYS:-14}"

# Optional remote destinations; empty string means "skip this destination".
BACKUP_S3_URI="${BACKUP_S3_URI:-}"
BACKUP_NAS_TARGET="${BACKUP_NAS_TARGET:-}"
# Custom S3 endpoint (e.g. MinIO); empty uses the aws CLI default.
AWS_ENDPOINT_URL="${AWS_ENDPOINT_URL:-}"

BACKUP_NAME="hermes-backup-${DATESTAMP}"
LOCAL_BACKUP_DIR="${BACKUP_ROOT}/${DATESTAMP}"

# Scratch area for the plaintext archive, encrypted archive and manifest.
# NOTE(review): created before the EXIT trap is installed further down, so a
# failure between here and the trap would leak the (still empty) directory.
STAGE_DIR="$(mktemp -d "${TMPDIR:-/tmp}/timmy-backup.XXXXXX")"
PLAINTEXT_ARCHIVE="${STAGE_DIR}/${BACKUP_NAME}.tar.gz"
ENCRYPTED_ARCHIVE="${STAGE_DIR}/${BACKUP_NAME}.tar.gz.enc"
MANIFEST_PATH="${STAGE_DIR}/${BACKUP_NAME}.json"
ALERT_LOG="${BACKUP_LOG_DIR}/backup_pipeline.log"

# Set by resolve_passphrase_file when it materializes BACKUP_PASSPHRASE into
# a temp file; checked by cleanup() on exit.
PASSFILE_CLEANUP=""

# The log directory must exist before the first log() call appends to it.
mkdir -p "$BACKUP_LOG_DIR"
log() {
  # Emit a timestamped status line on stdout and mirror it into ALERT_LOG.
  local stamp
  stamp="$(date -Iseconds)"
  printf '[%s] %s\n' "$stamp" "$1" | tee -a "$ALERT_LOG"
}
fail() {
  # Record an error through log() and abort the whole pipeline with status 1.
  local reason="$1"
  log "ERROR: ${reason}"
  exit 1
}
cleanup() {
  # Runs via the EXIT trap on every termination path (success, fail(), set -e).
  # Remove any materialized passphrase file first, then the staging area.
  if [[ -n "$PASSFILE_CLEANUP" && -f "$PASSFILE_CLEANUP" ]]; then
    rm -f "$PASSFILE_CLEANUP"
  fi
  # STAGE_DIR contains the plaintext archive too; the explicit rm -f is
  # belt-and-braces in case PLAINTEXT_ARCHIVE ever moves out of STAGE_DIR.
  rm -f "$PLAINTEXT_ARCHIVE"
  rm -rf "$STAGE_DIR"
}
trap cleanup EXIT
resolve_passphrase_file() {
  # Print (on stdout) the path of a file containing the encryption passphrase.
  # Preference order:
  #   1. BACKUP_PASSPHRASE_FILE — an existing file supplied by the operator.
  #   2. BACKUP_PASSPHRASE      — materialized into a 0600 file in STAGE_DIR.
  # Calls fail() (which exits 1) if neither is set.
  if [[ -n "${BACKUP_PASSPHRASE_FILE:-}" ]]; then
    [[ -f "$BACKUP_PASSPHRASE_FILE" ]] || fail "BACKUP_PASSPHRASE_FILE does not exist: $BACKUP_PASSPHRASE_FILE"
    echo "$BACKUP_PASSPHRASE_FILE"
    return
  fi

  if [[ -n "${BACKUP_PASSPHRASE:-}" ]]; then
    PASSFILE_CLEANUP="${STAGE_DIR}/backup.passphrase"
    # Create the file with restrictive permissions BEFORE writing the secret;
    # the previous write-then-chmod order left a window where the passphrase
    # was readable by other users under a permissive umask.
    (umask 077; : > "$PASSFILE_CLEANUP")
    chmod 600 "$PASSFILE_CLEANUP"
    printf '%s' "$BACKUP_PASSPHRASE" > "$PASSFILE_CLEANUP"
    # NOTE(review): callers invoke this function via $(...), so this
    # PASSFILE_CLEANUP assignment happens in a subshell and never reaches
    # cleanup(). The file is still removed because it lives inside STAGE_DIR.
    echo "$PASSFILE_CLEANUP"
    return
  fi

  fail "Set BACKUP_PASSPHRASE_FILE or BACKUP_PASSPHRASE before running the backup pipeline."
}
sha256_file() {
|
|
local path="$1"
|
|
if command -v shasum >/dev/null 2>&1; then
|
|
shasum -a 256 "$path" | awk '{print $1}'
|
|
elif command -v sha256sum >/dev/null 2>&1; then
|
|
sha256sum "$path" | awk '{print $1}'
|
|
else
|
|
python3 - <<'PY' "$path"
|
|
import hashlib
|
|
import pathlib
|
|
import sys
|
|
path = pathlib.Path(sys.argv[1])
|
|
h = hashlib.sha256()
|
|
with path.open('rb') as f:
|
|
for chunk in iter(lambda: f.read(1024 * 1024), b''):
|
|
h.update(chunk)
|
|
print(h.hexdigest())
|
|
PY
|
|
fi
|
|
}
|
|
|
|
write_manifest() {
  # Serialize backup metadata as pretty-printed JSON at the path given in $1.
  # Positional arguments:
  #   $1 manifest path   $2 source dir      $3 archive name        $4 archive sha256
  #   $5 local dest dir  $6 s3 uri (or "")  $7 nas target (or "")  $8 created-at
  python3 - <<'PY' "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8"
import json
import sys

(manifest_path, source_dir, archive_name, archive_sha256,
 local_dir, s3_uri, nas_target, created_at) = sys.argv[1:]

manifest = {
    "created_at": created_at,
    "source_dir": source_dir,
    "archive_name": archive_name,
    "archive_sha256": archive_sha256,
    # Must stay in sync with the openssl invocation in the main pipeline.
    "encryption": {
        "type": "openssl",
        "cipher": "aes-256-cbc",
        "pbkdf2": True,
        "iterations": 200000,
    },
    "destinations": {
        "local_dir": local_dir,
        # Empty strings mean "destination not configured"; store null instead.
        "s3_uri": s3_uri if s3_uri else None,
        "nas_target": nas_target if nas_target else None,
    },
}

with open(manifest_path, "w", encoding="utf-8") as handle:
    json.dump(manifest, handle, indent=2)
    handle.write("\n")
PY
}
upload_to_nas() {
  # Copy the encrypted archive and its manifest into a datestamped
  # subdirectory beneath the NAS mount point given in $3.
  local archive="$1"
  local manifest="$2"
  local root="$3"
  local dest

  dest="${root%/}/${DATESTAMP}"
  mkdir -p "$dest"
  cp "$archive" "$manifest" "$dest/"
  log "Uploaded backup to NAS target: $dest"
}
upload_to_s3() {
  # Upload the encrypted archive and its manifest to BACKUP_S3_URI with the
  # aws CLI, honoring AWS_ENDPOINT_URL (e.g. MinIO) when it is non-empty.
  local archive_path="$1"
  local manifest_path="$2"

  command -v aws >/dev/null 2>&1 || fail "BACKUP_S3_URI is set but aws CLI is not installed."

  local args=()
  if [[ -n "$AWS_ENDPOINT_URL" ]]; then
    args+=(--endpoint-url "$AWS_ENDPOINT_URL")
  fi

  # ${args[@]+"${args[@]}"} expands to nothing when the array is empty,
  # avoiding the "unbound variable" error that bare "${args[@]}" raises
  # under `set -u` on bash < 4.4 (e.g. the stock /bin/bash on macOS).
  aws ${args[@]+"${args[@]}"} s3 cp "$archive_path" "${BACKUP_S3_URI%/}/$(basename "$archive_path")"
  aws ${args[@]+"${args[@]}"} s3 cp "$manifest_path" "${BACKUP_S3_URI%/}/$(basename "$manifest_path")"
  log "Uploaded backup to S3 target: $BACKUP_S3_URI"
}
# --- Preflight checks -------------------------------------------------------
[[ -d "$BACKUP_SOURCE_DIR" ]] || fail "BACKUP_SOURCE_DIR does not exist: $BACKUP_SOURCE_DIR"
# At least one off-host destination is mandatory; a local-only copy is not
# treated as a real backup.
[[ -n "$BACKUP_NAS_TARGET" || -n "$BACKUP_S3_URI" ]] || fail "Set BACKUP_NAS_TARGET or BACKUP_S3_URI for remote backup storage."

# NOTE(review): $(...) runs resolve_passphrase_file in a subshell, so its
# PASSFILE_CLEANUP assignment never reaches cleanup(); the materialized
# passphrase file is still removed because it lives inside STAGE_DIR.
PASSFILE="$(resolve_passphrase_file)"
mkdir -p "$LOCAL_BACKUP_DIR"

# --- Archive + encrypt ------------------------------------------------------
log "Creating archive from $BACKUP_SOURCE_DIR"
# -C into the parent so the archive contains one relative top-level directory
# (e.g. ".hermes/") rather than absolute paths.
tar -czf "$PLAINTEXT_ARCHIVE" -C "$(dirname "$BACKUP_SOURCE_DIR")" "$(basename "$BACKUP_SOURCE_DIR")"

log "Encrypting archive"
# Cipher parameters must stay in sync with the "encryption" stanza written by
# write_manifest (aes-256-cbc, pbkdf2, 200000 iterations).
openssl enc -aes-256-cbc -salt -pbkdf2 -iter 200000 \
  -pass "file:${PASSFILE}" \
  -in "$PLAINTEXT_ARCHIVE" \
  -out "$ENCRYPTED_ARCHIVE"

# Checksum the *encrypted* artifact — that is the file whose integrity gets
# verified after transfer or before restore.
ARCHIVE_SHA256="$(sha256_file "$ENCRYPTED_ARCHIVE")"
CREATED_AT="$(date -u '+%Y-%m-%dT%H:%M:%SZ')"
write_manifest "$MANIFEST_PATH" "$BACKUP_SOURCE_DIR" "$(basename "$ENCRYPTED_ARCHIVE")" "$ARCHIVE_SHA256" "$LOCAL_BACKUP_DIR" "$BACKUP_S3_URI" "$BACKUP_NAS_TARGET" "$CREATED_AT"

# --- Store locally, then fan out to remotes ---------------------------------
cp "$ENCRYPTED_ARCHIVE" "$MANIFEST_PATH" "$LOCAL_BACKUP_DIR/"
# Drop the plaintext archive as early as possible; cleanup() would remove it
# on exit anyway, but there is no reason to keep it around during uploads.
rm -f "$PLAINTEXT_ARCHIVE"
log "Encrypted backup stored locally: ${LOCAL_BACKUP_DIR}/$(basename "$ENCRYPTED_ARCHIVE")"

if [[ -n "$BACKUP_NAS_TARGET" ]]; then
  upload_to_nas "$ENCRYPTED_ARCHIVE" "$MANIFEST_PATH" "$BACKUP_NAS_TARGET"
fi

if [[ -n "$BACKUP_S3_URI" ]]; then
  upload_to_s3 "$ENCRYPTED_ARCHIVE" "$MANIFEST_PATH"
fi

# --- Local retention --------------------------------------------------------
# Datestamped dirs all match '20*'; -mtime +N prunes those older than the
# retention window. stderr suppression and '|| true' are deliberate: a
# retention hiccup must never fail the backup run itself.
find "$BACKUP_ROOT" -mindepth 1 -maxdepth 1 -type d -name '20*' -mtime "+${BACKUP_RETENTION_DAYS}" -exec rm -rf {} + 2>/dev/null || true
log "Retention applied (${BACKUP_RETENTION_DAYS} days)"

log "Backup pipeline completed successfully"
|