#!/usr/bin/env python3
|
|
"""Plan atomic fleet config sync releases.
|
|
|
|
Refs: timmy-home #550
|
|
|
|
Phase-3 orchestration slice:
|
|
- define a shared config-sync manifest for fleet hosts
|
|
- fingerprint the exact config payload into one release id
|
|
- generate per-host staging paths and atomic symlink-swap promotion metadata
|
|
- stay dry-run by default so rollout planning is safe to verify locally
|
|
"""
|
|
|
|
from __future__ import annotations
|
|
|
|
import argparse
|
|
import hashlib
|
|
import json
|
|
from pathlib import Path
|
|
from typing import Any
|
|
|
|
import yaml
|
|
|
|
|
|
# Default Ansible inventory location: <repo_root>/ansible/inventory/hosts.ini,
# resolved relative to this script's own location (parents[1] = repo root).
DEFAULT_INVENTORY_FILE = Path(__file__).resolve().parents[1] / "ansible" / "inventory" / "hosts.ini"
|
|
|
|
|
|
def load_inventory_hosts(path: str | Path) -> dict[str, dict[str, str]]:
    """Parse the ``[fleet]`` section of an Ansible INI inventory file.

    Returns a mapping of host name -> metadata dict. Every metadata dict
    contains at least ``"host"``; any ``key=value`` tokens on the host
    line are merged in as additional string entries.

    Raises ValueError when the inventory declares no fleet hosts.
    """
    fleet: dict[str, dict[str, str]] = {}
    current_section: str | None = None

    for entry in Path(path).read_text(encoding="utf-8").splitlines():
        entry = entry.strip()
        # Skip blanks and INI comments (both '#' and ';' styles).
        if not entry or entry[0] in "#;":
            continue
        # Section headers look like "[name]"; normalize for comparison.
        if entry[0] == "[" and entry[-1] == "]":
            current_section = entry[1:-1].strip().lower()
            continue
        # Only host lines inside [fleet] are of interest.
        if current_section != "fleet":
            continue

        tokens = entry.split()
        name = tokens[0]
        meta = {"host": name}
        for assignment in tokens[1:]:
            # Tokens without '=' are ignored, matching Ansible's var syntax.
            if "=" in assignment:
                key, _, value = assignment.partition("=")
                meta[key] = value
        fleet[name] = meta

    if not fleet:
        raise ValueError("inventory defines no [fleet] hosts")
    return fleet
|
|
|
|
|
|
|
|
def load_manifest(path: str | Path) -> dict[str, Any]:
    """Load the YAML sync manifest from *path* and apply defaults.

    The returned mapping always carries a ``"fleet_name"`` and a
    list-valued ``"targets"`` key.

    Raises ValueError when the document is not a YAML mapping or when
    ``targets`` is present but not a list.
    """
    raw_text = Path(path).read_text(encoding="utf-8")
    # An empty file parses to None; treat it as an empty manifest.
    manifest = yaml.safe_load(raw_text) or {}
    if not isinstance(manifest, dict):
        raise ValueError("manifest must contain a YAML object")
    manifest.setdefault("fleet_name", "timmy-fleet-config")
    manifest.setdefault("targets", [])
    if not isinstance(manifest["targets"], list):
        raise ValueError("targets must be a list")
    return manifest
|
|
|
|
|
|
|
|
def _normalize_relative_path(value: str) -> str:
|
|
path = Path(value)
|
|
if path.is_absolute():
|
|
raise ValueError(f"sync file path must be relative: {value}")
|
|
if any(part == ".." for part in path.parts):
|
|
raise ValueError(f"sync file path may not escape source root: {value}")
|
|
normalized = path.as_posix()
|
|
if normalized in {"", "."}:
|
|
raise ValueError("sync file path may not be empty")
|
|
return normalized
|
|
|
|
|
|
|
|
def validate_manifest(manifest: dict[str, Any], inventory_hosts: dict[str, dict[str, str]]) -> None:
    """Check the manifest's structure against the inventory.

    Enforces: at least one target; each target is a mapping declaring a
    unique host known to the inventory, a non-empty ``config_root``, and
    a non-empty list of distinct relative file paths.

    Raises ValueError on the first violation found; returns None when
    the manifest is structurally valid.
    """
    targets = manifest.get("targets", [])
    if not targets:
        raise ValueError("manifest must define at least one sync target")

    claimed: set[str] = set()
    for spec in targets:
        if not isinstance(spec, dict):
            raise ValueError("each target must be a mapping")

        host = str(spec.get("host", "")).strip()
        if not host:
            raise ValueError("each target must declare a host")
        if host in claimed:
            raise ValueError(f"duplicate target host: {host}")
        if host not in inventory_hosts:
            raise ValueError(f"unknown inventory host: {host}")
        claimed.add(host)

        if not str(spec.get("config_root", "")).strip():
            raise ValueError(f"target {host} missing config_root")

        declared = spec.get("files")
        if not isinstance(declared, list) or not declared:
            raise ValueError(f"target {host} must declare at least one file")

        # Normalize before the duplicate check so e.g. "a//b" and "a/b"
        # are treated as the same path.
        normalized = [_normalize_relative_path(str(item)) for item in declared]
        if len(normalized) != len(set(normalized)):
            raise ValueError(f"target {host} declares duplicate file paths")
|
|
|
|
|
|
|
|
def _hash_file(path: Path) -> str:
|
|
return hashlib.sha256(path.read_bytes()).hexdigest()
|
|
|
|
|
|
|
|
def _collect_target_files(source_root: Path, rel_paths: list[str]) -> list[dict[str, Any]]:
    """Resolve and fingerprint each relative path under *source_root*.

    Returns one metadata dict per file — relative path, absolute source
    path, sha256 digest, byte size — sorted by normalized relative path
    so downstream hashing is order-stable.

    Raises FileNotFoundError for missing files and ValueError for
    entries that exist but are not regular files.
    """
    manifest_items: list[dict[str, Any]] = []
    for relative in sorted(_normalize_relative_path(p) for p in rel_paths):
        absolute = source_root / relative
        if not absolute.exists():
            raise FileNotFoundError(f"missing source file: {relative}")
        if not absolute.is_file():
            raise ValueError(f"sync source must be a file: {relative}")
        manifest_items.append(
            {
                "relative_path": relative,
                "source": str(absolute),
                "sha256": _hash_file(absolute),
                "size": absolute.stat().st_size,
            }
        )
    return manifest_items
|
|
|
|
|
|
|
|
def compute_release_id(target_payloads: list[dict[str, Any]]) -> str:
    """Derive a short deterministic release id from staged target payloads.

    Feeds every host name and each file's (relative_path, sha256, size),
    NUL-separated and in canonical sorted order, into one SHA-256, then
    truncates to 12 hex chars. Identical payloads always produce the
    same id regardless of input ordering.
    """
    hasher = hashlib.sha256()
    for payload in sorted(target_payloads, key=lambda entry: entry["host"]):
        hasher.update(payload["host"].encode("utf-8") + b"\0")
        ordered_files = sorted(payload["files"], key=lambda entry: entry["relative_path"])
        for record in ordered_files:
            for field in (record["relative_path"], record["sha256"], str(record["size"])):
                hasher.update(field.encode("utf-8") + b"\0")
    return hasher.hexdigest()[:12]
|
|
|
|
|
|
|
|
def build_rollout_plan(
    manifest: dict[str, Any],
    inventory_hosts: dict[str, dict[str, str]],
    *,
    source_root: str | Path,
) -> dict[str, Any]:
    """Build the full dry-run rollout plan for one config-sync release.

    Validates the manifest against the inventory, fingerprints every
    source file under *source_root*, derives a single release id from
    all fingerprints, and renders per-host staging/symlink promotion
    metadata.

    Raises ValueError (invalid manifest / non-directory source root) or
    FileNotFoundError (missing source root or source file).
    """
    validate_manifest(manifest, inventory_hosts)

    source_root = Path(source_root)
    if not source_root.exists():
        raise FileNotFoundError(f"source root not found: {source_root}")
    if not source_root.is_dir():
        raise ValueError(f"source root must be a directory: {source_root}")

    # Stage pass: collect hashed file metadata per host, in sorted host
    # order so the release id below is order-independent of the manifest.
    staged_targets: list[dict[str, Any]] = []
    for target in sorted(manifest["targets"], key=lambda item: item["host"]):
        host = target["host"]
        files = _collect_target_files(source_root, target["files"])
        staged_targets.append(
            {
                "host": host,
                "inventory": inventory_hosts[host],
                "config_root": str(target["config_root"]),
                "files": files,
            }
        )

    # One release id covers the whole fleet: any content change anywhere
    # yields a new id (and thus a new stage directory on every host).
    release_id = compute_release_id(staged_targets)
    total_bytes = 0
    file_count = 0
    rendered_targets: list[dict[str, Any]] = []
    for target in staged_targets:
        # Strip the trailing slash so the joined paths never double up.
        config_root = target["config_root"].rstrip("/")
        stage_root = f"{config_root}/.releases/{release_id}"
        live_symlink = f"{config_root}/current"
        previous_symlink = f"{config_root}/previous"
        file_count += len(target["files"])
        total_bytes += sum(item["size"] for item in target["files"])
        rendered_targets.append(
            {
                "host": target["host"],
                "ansible_host": target["inventory"].get("ansible_host", ""),
                "ansible_user": target["inventory"].get("ansible_user", ""),
                "config_root": config_root,
                "stage_root": stage_root,
                "live_symlink": live_symlink,
                "previous_symlink": previous_symlink,
                "files": target["files"],
                # Atomic promotion: point "current" at the staged release
                # in one symlink swap, keeping "previous" as the rollback.
                "promote": {
                    "mode": "symlink_swap",
                    "release_id": release_id,
                    "from": stage_root,
                    "to": live_symlink,
                    "backup_link": previous_symlink,
                },
            }
        )

    return {
        "fleet_name": manifest.get("fleet_name", "timmy-fleet-config"),
        "source_root": str(source_root),
        "release_id": release_id,
        "target_count": len(rendered_targets),
        "file_count": file_count,
        "total_bytes": total_bytes,
        "targets": rendered_targets,
    }
|
|
|
|
|
|
|
|
def render_markdown(plan: dict[str, Any]) -> str:
    """Render the rollout *plan* as a human-readable Markdown report.

    Produces a summary header, a per-host overview table, and one file
    manifest table per target. Always ends with exactly one newline.
    """
    out: list[str] = []
    out.append("# Fleet Config Sync Plan")
    out.append("")
    out.append(f"Fleet: {plan['fleet_name']}")
    out.append(f"Release ID: `{plan['release_id']}`")
    out.append(f"Source root: `{plan['source_root']}`")
    out.append(f"Target count: {plan['target_count']}")
    out.append(f"File count: {plan['file_count']}")
    out.append(f"Total bytes: {plan['total_bytes']}")
    out.append("")
    out.append("Atomic promote via symlink swap keeps every host on one named release boundary.")
    out.append("")
    out.append("| Host | Address | Stage root | Live symlink | Files |")
    out.append("|---|---|---|---|---:|")

    for entry in plan["targets"]:
        address = entry["ansible_host"] or "n/a"
        out.append(
            f"| {entry['host']} | {address} | `{entry['stage_root']}` | `{entry['live_symlink']}` | {len(entry['files'])} |"
        )

    out += ["", "## Target file manifests", ""]
    for entry in plan["targets"]:
        promote = entry["promote"]
        out.append(f"### {entry['host']}")
        out.append("")
        out.append(f"- Promote: `{promote['from']}` -> `{promote['to']}`")
        out.append(f"- Backup link: `{promote['backup_link']}`")
        out.append("")
        out.append("| Relative path | Bytes | SHA256 |")
        out.append("|---|---:|---|")
        for record in entry["files"]:
            out.append(
                f"| `{record['relative_path']}` | {record['size']} | `{record['sha256'][:16]}…` |"
            )
        out.append("")

    # Collapse any trailing blank lines down to a single final newline.
    return "\n".join(out).rstrip() + "\n"
|
|
|
|
|
|
|
|
def main() -> int:
    """CLI entry point: parse args, build the plan, print JSON or Markdown.

    Always returns 0 on success; dry-run only — nothing is written to
    any host.
    """
    parser = argparse.ArgumentParser(description="Plan a dry-run atomic config sync release across fleet hosts")
    parser.add_argument("manifest", help="Path to fleet config sync manifest YAML")
    parser.add_argument("--inventory", default=str(DEFAULT_INVENTORY_FILE), help="Path to Ansible fleet inventory")
    parser.add_argument("--source-root", default=".", help="Local source root containing files listed in the manifest")
    parser.add_argument("--markdown", action="store_true", help="Render markdown instead of JSON")
    opts = parser.parse_args()

    fleet_hosts = load_inventory_hosts(opts.inventory)
    sync_manifest = load_manifest(opts.manifest)
    rollout = build_rollout_plan(sync_manifest, fleet_hosts, source_root=opts.source_root)

    rendered = render_markdown(rollout) if opts.markdown else json.dumps(rollout, indent=2)
    print(rendered)
    return 0
|
|
|
|
|
|
if __name__ == "__main__":
    # Propagate main()'s return value to the shell as the exit status.
    raise SystemExit(main())
|