diff --git a/.gitea/workflows/weekly-audit.yml b/.gitea/workflows/weekly-audit.yml
index 1d32a32..dd1b4af 100644
--- a/.gitea/workflows/weekly-audit.yml
+++ b/.gitea/workflows/weekly-audit.yml
@@ -1,8 +1,9 @@
 name: Weekly Privacy Audit
 # Runs every Monday at 05:00 UTC against a CI test fixture.
-# On production wizards this same script should be run via cron:
+# On production wizards these same scripts should run via cron:
 #   0 5 * * 1  python /opt/nexus/mempalace/audit_privacy.py /var/lib/mempalace/fleet
+#   0 5 * * 1  python /opt/nexus/mempalace/retain_closets.py /var/lib/mempalace/fleet --days 90
 #
 # Refs: #1083, #1075
 
 on:
@@ -26,3 +27,8 @@ jobs:
       - name: Run privacy audit against CI fixture
         run: |
           python mempalace/audit_privacy.py tests/fixtures/fleet_palace
+
+      - name: Dry-run retention enforcement against CI fixture
+        # Real enforcement runs on the live VPS; CI verifies the script runs cleanly.
+        run: |
+          python mempalace/retain_closets.py tests/fixtures/fleet_palace --days 90 --dry-run
diff --git a/mempalace/retain_closets.py b/mempalace/retain_closets.py
new file mode 100644
index 0000000..ea75a10
--- /dev/null
+++ b/mempalace/retain_closets.py
@@ -0,0 +1,163 @@
+#!/usr/bin/env python3
+"""
+retain_closets.py — Retention policy enforcement for fleet palace closets.
+
+Removes closet files older than a configurable retention window (default: 90 days).
+Run this on the Alpha host (or any fleet palace directory) to enforce the
+closet aging policy described in #1083.
+
+Usage:
+    # Dry-run: show what would be removed (no deletions)
+    python mempalace/retain_closets.py --dry-run
+
+    # Enforce 90-day retention (default)
+    python mempalace/retain_closets.py
+
+    # Custom retention window
+    python mempalace/retain_closets.py --days 30
+
+    # Custom palace path
+    python mempalace/retain_closets.py /data/fleet --days 90
+
+Exits:
+    0 — success (clean, or pruned without error)
+    1 — error (e.g., palace directory not found)
+
+Refs: #1083, #1075
+"""
+
+from __future__ import annotations
+
+import argparse
+import os
+import sys
+import time
+from dataclasses import dataclass, field
+from pathlib import Path
+
+DEFAULT_RETENTION_DAYS = 90
+DEFAULT_PALACE_PATH = "/var/lib/mempalace/fleet"
+
+
+@dataclass
+class RetentionResult:
+    scanned: int = 0
+    removed: int = 0
+    kept: int = 0
+    errors: list[str] = field(default_factory=list)
+
+    @property
+    def ok(self) -> bool:
+        return len(self.errors) == 0
+
+
+def _file_age_days(path: Path) -> float:
+    """Return the age of a file in days based on mtime."""
+    mtime = path.stat().st_mtime
+    now = time.time()
+    return (now - mtime) / 86400.0
+
+
+def enforce_retention(
+    palace_dir: Path,
+    retention_days: int = DEFAULT_RETENTION_DAYS,
+    dry_run: bool = False,
+) -> RetentionResult:
+    """
+    Remove *.closet.json files older than *retention_days* from *palace_dir*.
+
+    Only closet files are pruned — raw drawer files are never present in a
+    compliant fleet palace, so this script does not touch them.
+
+    Args:
+        palace_dir: Root directory of the fleet palace to scan.
+        retention_days: Files older than this many days will be removed.
+        dry_run: If True, report what would be removed but make no changes.
+
+    Returns:
+        RetentionResult with counts and any errors.
+    """
+    result = RetentionResult()
+
+    for closet_file in sorted(palace_dir.rglob("*.closet.json")):
+        result.scanned += 1
+        try:
+            age = _file_age_days(closet_file)
+        except OSError as exc:
+            result.errors.append(f"Could not stat {closet_file}: {exc}")
+            continue
+
+        if age > retention_days:
+            if dry_run:
+                print(
+                    f"[retain_closets] DRY-RUN would remove ({age:.0f}d old): {closet_file}"
+                )
+                result.removed += 1
+            else:
+                try:
+                    closet_file.unlink()
+                    print(f"[retain_closets] Removed ({age:.0f}d old): {closet_file}")
+                    result.removed += 1
+                except OSError as exc:
+                    result.errors.append(f"Could not remove {closet_file}: {exc}")
+        else:
+            result.kept += 1
+
+    return result
+
+
+def main(argv: list[str] | None = None) -> int:
+    parser = argparse.ArgumentParser(
+        description="Enforce retention policy on fleet palace closets."
+    )
+    parser.add_argument(
+        "palace_dir",
+        nargs="?",
+        default=os.environ.get("FLEET_PALACE_PATH", DEFAULT_PALACE_PATH),
+        help=f"Fleet palace directory (default: {DEFAULT_PALACE_PATH})",
+    )
+    parser.add_argument(
+        "--days",
+        type=int,
+        default=DEFAULT_RETENTION_DAYS,
+        metavar="N",
+        help=f"Retention window in days (default: {DEFAULT_RETENTION_DAYS})",
+    )
+    parser.add_argument(
+        "--dry-run",
+        action="store_true",
+        help="Show what would be removed without deleting anything.",
+    )
+    args = parser.parse_args(argv)
+
+    palace_dir = Path(args.palace_dir)
+    if not palace_dir.exists():
+        print(
+            f"[retain_closets] ERROR: palace directory not found: {palace_dir}",
+            file=sys.stderr,
+        )
+        return 1
+
+    mode = "DRY-RUN" if args.dry_run else "LIVE"
+    print(
+        f"[retain_closets] {mode} — scanning {palace_dir} "
+        f"(retention: {args.days} days)"
+    )
+
+    result = enforce_retention(palace_dir, retention_days=args.days, dry_run=args.dry_run)
+
+    if result.errors:
+        for err in result.errors:
+            print(f"[retain_closets] ERROR: {err}", file=sys.stderr)
+        return 1
+
+    action = "would remove" if args.dry_run else "removed"
+    print(
+        f"[retain_closets] Done — scanned {result.scanned}, "
+        f"{action} {result.removed}, kept {result.kept}."
+    )
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/mempalace/tunnel_sync.py b/mempalace/tunnel_sync.py
new file mode 100644
index 0000000..74f53d4
--- /dev/null
+++ b/mempalace/tunnel_sync.py
@@ -0,0 +1,309 @@
+#!/usr/bin/env python3
+"""
+tunnel_sync.py — Pull closets from a remote wizard's fleet API into the local palace.
+
+This is the client-side tunnel mechanism for #1078. It connects to a peer
+wizard's running fleet_api.py HTTP server, discovers their memory wings, and
+imports the results into the local fleet palace as closet files. Once imported,
+`recall --fleet` in Evennia will return results from the remote wing.
+
+The code side is complete here; the infrastructure side (second wizard running
+fleet_api.py behind an SSH tunnel or VPN) is still required to use this.
+
+Usage:
+    # Pull from a remote Alpha fleet API into the default local palace
+    python mempalace/tunnel_sync.py --peer http://alpha.example.com:7771
+
+    # Custom local palace path
+    FLEET_PALACE_PATH=/data/fleet python mempalace/tunnel_sync.py \\
+        --peer http://alpha.example.com:7771
+
+    # Dry-run: show what would be imported without writing files
+    python mempalace/tunnel_sync.py --peer http://alpha.example.com:7771 --dry-run
+
+    # Limit results per room (default: 50)
+    python mempalace/tunnel_sync.py --peer http://alpha.example.com:7771 --n 20
+
+Environment:
+    FLEET_PALACE_PATH — local fleet palace directory (default: /var/lib/mempalace/fleet)
+    FLEET_PEER_URL — remote fleet API URL (overridden by --peer flag)
+
+Exits:
+    0 — sync succeeded (or dry-run completed)
+    1 — error (connection failure, invalid response, write error)
+
+Refs: #1078, #1075
+"""
+
+from __future__ import annotations
+
+import argparse
+import json
+import os
+import sys
+import time
+import urllib.error
+import urllib.parse
+import urllib.request
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import Any
+
+DEFAULT_PALACE_PATH = "/var/lib/mempalace/fleet"
+DEFAULT_N_RESULTS = 50
+# Broad queries for bulk room pull — used to discover representative content
+_BROAD_QUERIES = [
+    "the", "a", "is", "was", "and", "of", "to", "in", "it", "on",
+    "commit", "issue", "error", "fix", "deploy", "event", "memory",
+]
+_REQUEST_TIMEOUT = 10  # seconds
+
+
+@dataclass
+class SyncResult:
+    wings_found: list[str] = field(default_factory=list)
+    rooms_pulled: int = 0
+    closets_written: int = 0
+    errors: list[str] = field(default_factory=list)
+
+    @property
+    def ok(self) -> bool:
+        return len(self.errors) == 0
+
+
+# ---------------------------------------------------------------------------
+# HTTP helpers
+# ---------------------------------------------------------------------------
+
+def _get(url: str) -> dict[str, Any]:
+    """GET *url*, return parsed JSON or raise on error."""
+    req = urllib.request.Request(url, headers={"Accept": "application/json"})
+    with urllib.request.urlopen(req, timeout=_REQUEST_TIMEOUT) as resp:
+        return json.loads(resp.read())
+
+
+def _peer_url(base: str, path: str) -> str:
+    return base.rstrip("/") + path
+
+
+# ---------------------------------------------------------------------------
+# Wing / room discovery
+# ---------------------------------------------------------------------------
+
+def get_remote_wings(peer_url: str) -> list[str]:
+    """Return the list of wing names from the remote fleet API."""
+    data = _get(_peer_url(peer_url, "/wings"))
+    return data.get("wings", [])
+
+
+def search_remote_room(peer_url: str, room: str, n: int = DEFAULT_N_RESULTS) -> list[dict]:
+    """
+    Pull closet entries for a specific room from the remote peer.
+
+    Uses multiple broad queries and deduplicates by text to maximize coverage
+    without requiring a dedicated bulk-export endpoint.
+    """
+    seen_texts: set[str] = set()
+    results: list[dict] = []
+
+    for q in _BROAD_QUERIES:
+        url = _peer_url(peer_url, f"/search?q={urllib.parse.quote(q)}&room={urllib.parse.quote(room)}&n={n}")
+        try:
+            data = _get(url)
+        except (urllib.error.URLError, json.JSONDecodeError, OSError):
+            continue
+
+        for entry in data.get("results", []):
+            text = entry.get("text", "")
+            if text and text not in seen_texts:
+                seen_texts.add(text)
+                results.append(entry)
+
+        if len(results) >= n:
+            break
+
+    return results[:n]
+
+
+# ---------------------------------------------------------------------------
+# Core sync
+# ---------------------------------------------------------------------------
+
+def _write_closet(
+    palace_dir: Path,
+    wing: str,
+    room: str,
+    entries: list[dict],
+    dry_run: bool,
+) -> bool:
+    """Write entries as a .closet.json file under palace_dir/wing/."""
+    wing_dir = palace_dir / wing
+    closet_path = wing_dir / f"{room}.closet.json"
+
+    drawers = [
+        {
+            "text": e.get("text", ""),
+            "room": e.get("room", room),
+            "wing": e.get("wing", wing),
+            "score": e.get("score", 0.0),
+            "closet": True,
+            "source_file": f"tunnel:{wing}/{room}",
+            "synced_at": int(time.time()),
+        }
+        for e in entries
+    ]
+
+    payload = json.dumps({"drawers": drawers, "wing": wing, "room": room}, indent=2)
+
+    if dry_run:
+        print(f"[tunnel_sync] DRY-RUN would write {len(drawers)} entries → {closet_path}")
+        return True
+
+    try:
+        wing_dir.mkdir(parents=True, exist_ok=True)
+        closet_path.write_text(payload)
+        print(f"[tunnel_sync] Wrote {len(drawers)} entries → {closet_path}")
+        return True
+    except OSError as exc:
+        print(f"[tunnel_sync] ERROR writing {closet_path}: {exc}", file=sys.stderr)
+        return False
+
+
+def sync_peer(
+    peer_url: str,
+    palace_dir: Path,
+    n_results: int = DEFAULT_N_RESULTS,
+    dry_run: bool = False,
+) -> SyncResult:
+    """
+    Pull all wings and rooms from *peer_url* into *palace_dir*.
+
+    Args:
+        peer_url: Base URL of the remote fleet_api.py instance.
+        palace_dir: Local fleet palace directory to write closets into.
+        n_results: Maximum results to pull per room.
+        dry_run: If True, print what would be written without touching disk.
+
+    Returns:
+        SyncResult with counts and any errors.
+    """
+    result = SyncResult()
+
+    # Discover health
+    try:
+        health = _get(_peer_url(peer_url, "/health"))
+        if health.get("status") != "ok":
+            result.errors.append(f"Peer unhealthy: {health}")
+            return result
+    except (urllib.error.URLError, json.JSONDecodeError, OSError) as exc:
+        result.errors.append(f"Could not reach peer at {peer_url}: {exc}")
+        return result
+
+    # Discover wings
+    try:
+        wings = get_remote_wings(peer_url)
+    except (urllib.error.URLError, json.JSONDecodeError, OSError) as exc:
+        result.errors.append(f"Could not list wings from {peer_url}: {exc}")
+        return result
+
+    result.wings_found = wings
+    if not wings:
+        print(f"[tunnel_sync] No wings found at {peer_url} — nothing to sync.")
+        return result
+
+    print(f"[tunnel_sync] Found wings: {wings}")
+
+    # Import core rooms from each wing
+    from nexus.mempalace.config import CORE_ROOMS
+
+    for wing in wings:
+        for room in CORE_ROOMS:
+            print(f"[tunnel_sync] Pulling {wing}/{room} …")
+            try:
+                entries = search_remote_room(peer_url, room, n=n_results)
+            except (urllib.error.URLError, json.JSONDecodeError, OSError) as exc:
+                err = f"Error pulling {wing}/{room}: {exc}"
+                result.errors.append(err)
+                print(f"[tunnel_sync] ERROR: {err}", file=sys.stderr)
+                continue
+
+            if not entries:
+                print(f"[tunnel_sync] No entries found for {wing}/{room} — skipping.")
+                continue
+
+            ok = _write_closet(palace_dir, wing, room, entries, dry_run=dry_run)
+            result.rooms_pulled += 1
+            if ok:
+                result.closets_written += 1
+
+    return result
+
+
+# ---------------------------------------------------------------------------
+# CLI
+# ---------------------------------------------------------------------------
+
+def main(argv: list[str] | None = None) -> int:
+    parser = argparse.ArgumentParser(
+        description="Sync closets from a remote wizard's fleet API into the local palace."
+    )
+    parser.add_argument(
+        "--peer",
+        default=os.environ.get("FLEET_PEER_URL", ""),
+        metavar="URL",
+        help="Base URL of the remote fleet_api.py (e.g. http://alpha.example.com:7771)",
+    )
+    parser.add_argument(
+        "--palace",
+        default=os.environ.get("FLEET_PALACE_PATH", DEFAULT_PALACE_PATH),
+        metavar="DIR",
+        help=f"Local fleet palace directory (default: {DEFAULT_PALACE_PATH})",
+    )
+    parser.add_argument(
+        "--n",
+        type=int,
+        default=DEFAULT_N_RESULTS,
+        metavar="N",
+        help=f"Max results per room (default: {DEFAULT_N_RESULTS})",
+    )
+    parser.add_argument(
+        "--dry-run",
+        action="store_true",
+        help="Show what would be synced without writing files.",
+    )
+    args = parser.parse_args(argv)
+
+    if not args.peer:
+        print(
+            "[tunnel_sync] ERROR: --peer URL is required (or set FLEET_PEER_URL).",
+            file=sys.stderr,
+        )
+        return 1
+
+    palace_dir = Path(args.palace)
+    if not palace_dir.exists() and not args.dry_run:
+        print(
+            f"[tunnel_sync] ERROR: local palace not found: {palace_dir}",
+            file=sys.stderr,
+        )
+        return 1
+
+    mode = "DRY-RUN" if args.dry_run else "LIVE"
+    print(f"[tunnel_sync] {mode} — peer: {args.peer} palace: {palace_dir}")
+
+    result = sync_peer(args.peer, palace_dir, n_results=args.n, dry_run=args.dry_run)
+
+    if result.errors:
+        for err in result.errors:
+            print(f"[tunnel_sync] ERROR: {err}", file=sys.stderr)
+        return 1
+
+    print(
+        f"[tunnel_sync] Done — wings: {result.wings_found}, "
+        f"rooms pulled: {result.rooms_pulled}, closets written: {result.closets_written}."
+    )
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/tests/test_mempalace_retain_closets.py b/tests/test_mempalace_retain_closets.py
new file mode 100644
index 0000000..280dcd7
--- /dev/null
+++ b/tests/test_mempalace_retain_closets.py
@@ -0,0 +1,139 @@
+"""
+Tests for mempalace/retain_closets.py — 90-day closet retention enforcement.
+
+Refs: #1083, #1075
+"""
+
+from __future__ import annotations
+
+import json
+import time
+from pathlib import Path
+
+import pytest
+
+from mempalace.retain_closets import (
+    RetentionResult,
+    _file_age_days,
+    enforce_retention,
+)
+
+
+# ---------------------------------------------------------------------------
+# Helpers
+# ---------------------------------------------------------------------------
+
+def _write_closet(directory: Path, name: str, age_days: float) -> Path:
+    """Create a *.closet.json file with a mtime set to *age_days* ago."""
+    p = directory / name
+    p.write_text(json.dumps({"drawers": [{"text": "summary", "closet": True}]}))
+    # Set mtime to simulate age
+    mtime = time.time() - age_days * 86400.0
+    import os
+    os.utime(p, (mtime, mtime))
+    return p
+
+
+# ---------------------------------------------------------------------------
+# _file_age_days
+# ---------------------------------------------------------------------------
+
+def test_file_age_days_recent(tmp_path):
+    p = tmp_path / "recent.closet.json"
+    p.write_text("{}")
+    age = _file_age_days(p)
+    assert 0 <= age < 1  # just created
+
+
+def test_file_age_days_old(tmp_path):
+    p = _write_closet(tmp_path, "old.closet.json", age_days=100)
+    age = _file_age_days(p)
+    assert 99 < age < 101
+
+
+# ---------------------------------------------------------------------------
+# enforce_retention — dry_run
+# ---------------------------------------------------------------------------
+
+def test_dry_run_does_not_delete(tmp_path):
+    old = _write_closet(tmp_path, "old.closet.json", age_days=100)
+    _write_closet(tmp_path, "new.closet.json", age_days=10)
+
+    result = enforce_retention(tmp_path, retention_days=90, dry_run=True)
+
+    # File still exists after dry-run
+    assert old.exists()
+    assert result.removed == 1  # counted but not actually removed
+    assert result.kept == 1
+    assert result.ok
+
+
+def test_dry_run_keeps_recent_files(tmp_path):
+    _write_closet(tmp_path, "recent.closet.json", age_days=5)
+    result = enforce_retention(tmp_path, retention_days=90, dry_run=True)
+    assert result.removed == 0
+    assert result.kept == 1
+
+
+# ---------------------------------------------------------------------------
+# enforce_retention — live mode
+# ---------------------------------------------------------------------------
+
+def test_live_removes_old_closets(tmp_path):
+    old = _write_closet(tmp_path, "old.closet.json", age_days=100)
+    new = _write_closet(tmp_path, "new.closet.json", age_days=10)
+
+    result = enforce_retention(tmp_path, retention_days=90, dry_run=False)
+
+    assert not old.exists()
+    assert new.exists()
+    assert result.removed == 1
+    assert result.kept == 1
+    assert result.ok
+
+
+def test_live_keeps_files_within_window(tmp_path):
+    f = _write_closet(tmp_path, "edge.closet.json", age_days=89)
+    result = enforce_retention(tmp_path, retention_days=90, dry_run=False)
+    assert f.exists()
+    assert result.removed == 0
+    assert result.kept == 1
+
+
+def test_empty_directory_is_ok(tmp_path):
+    result = enforce_retention(tmp_path, retention_days=90)
+    assert result.scanned == 0
+    assert result.removed == 0
+    assert result.ok
+
+
+def test_subdirectory_closets_are_pruned(tmp_path):
+    """enforce_retention should recurse into subdirs (wing directories)."""
+    sub = tmp_path / "bezalel"
+    sub.mkdir()
+    old = _write_closet(sub, "hermes.closet.json", age_days=120)
+    result = enforce_retention(tmp_path, retention_days=90, dry_run=False)
+    assert not old.exists()
+    assert result.removed == 1
+
+
+def test_non_closet_files_ignored(tmp_path):
+    """Non-closet files should not be counted or touched."""
+    (tmp_path / "readme.txt").write_text("hello")
+    (tmp_path / "data.drawer.json").write_text("{}")
+    result = enforce_retention(tmp_path, retention_days=90)
+    assert result.scanned == 0
+
+
+# ---------------------------------------------------------------------------
+# RetentionResult.ok
+# ---------------------------------------------------------------------------
+
+def test_retention_result_ok_with_no_errors():
+    r = RetentionResult(scanned=5, removed=2, kept=3)
+    assert r.ok is True
+
+
+def test_retention_result_not_ok_with_errors():
+    r = RetentionResult(errors=["could not stat file"])
+    assert r.ok is False
diff --git a/tests/test_mempalace_tunnel_sync.py b/tests/test_mempalace_tunnel_sync.py
new file mode 100644
index 0000000..0a72d09
--- /dev/null
+++ b/tests/test_mempalace_tunnel_sync.py
@@ -0,0 +1,205 @@
+"""
+Tests for mempalace/tunnel_sync.py — remote wizard wing sync client.
+
+Refs: #1078, #1075
+"""
+
+from __future__ import annotations
+
+import json
+from pathlib import Path
+from unittest.mock import MagicMock, patch
+
+import pytest
+
+from mempalace.tunnel_sync import (
+    SyncResult,
+    _peer_url,
+    _write_closet,
+    get_remote_wings,
+    search_remote_room,
+    sync_peer,
+)
+
+
+# ---------------------------------------------------------------------------
+# _peer_url
+# ---------------------------------------------------------------------------
+
+def test_peer_url_strips_trailing_slash():
+    assert _peer_url("http://host:7771/", "/wings") == "http://host:7771/wings"
+
+
+def test_peer_url_with_path():
+    assert _peer_url("http://host:7771", "/search") == "http://host:7771/search"
+
+
+# ---------------------------------------------------------------------------
+# get_remote_wings
+# ---------------------------------------------------------------------------
+
+def test_get_remote_wings_returns_list():
+    with patch("mempalace.tunnel_sync._get", return_value={"wings": ["bezalel", "timmy"]}):
+        wings = get_remote_wings("http://peer:7771")
+    assert wings == ["bezalel", "timmy"]
+
+
+def test_get_remote_wings_empty():
+    with patch("mempalace.tunnel_sync._get", return_value={"wings": []}):
+        wings = get_remote_wings("http://peer:7771")
+    assert wings == []
+
+
+# ---------------------------------------------------------------------------
+# search_remote_room
+# ---------------------------------------------------------------------------
+
+def _make_entry(text: str, room: str = "forge", wing: str = "bezalel", score: float = 0.9) -> dict:
+    return {"text": text, "room": room, "wing": wing, "score": score}
+
+
+def test_search_remote_room_deduplicates():
+    entry = _make_entry("CI passed")
+    # Same entry returned from multiple queries — should only appear once
+    with patch("mempalace.tunnel_sync._get", return_value={"results": [entry]}):
+        results = search_remote_room("http://peer:7771", "forge", n=50)
+    assert len(results) == 1
+    assert results[0]["text"] == "CI passed"
+
+
+def test_search_remote_room_respects_n_limit():
+    entries = [_make_entry(f"item {i}") for i in range(100)]
+    with patch("mempalace.tunnel_sync._get", return_value={"results": entries}):
+        results = search_remote_room("http://peer:7771", "forge", n=5)
+    assert len(results) <= 5
+
+
+def test_search_remote_room_handles_request_error():
+    import urllib.error
+    with patch("mempalace.tunnel_sync._get", side_effect=urllib.error.URLError("refused")):
+        results = search_remote_room("http://peer:7771", "forge")
+    assert results == []
+
+
+# ---------------------------------------------------------------------------
+# _write_closet
+# ---------------------------------------------------------------------------
+
+def test_write_closet_creates_file(tmp_path):
+    entries = [_make_entry("a memory")]
+    ok = _write_closet(tmp_path, "bezalel", "forge", entries, dry_run=False)
+    assert ok is True
+    closet = tmp_path / "bezalel" / "forge.closet.json"
+    assert closet.exists()
+    data = json.loads(closet.read_text())
+    assert data["wing"] == "bezalel"
+    assert data["room"] == "forge"
+    assert len(data["drawers"]) == 1
+    assert data["drawers"][0]["closet"] is True
+    assert data["drawers"][0]["text"] == "a memory"
+
+
+def test_write_closet_dry_run_does_not_create(tmp_path):
+    entries = [_make_entry("a memory")]
+    ok = _write_closet(tmp_path, "bezalel", "forge", entries, dry_run=True)
+    assert ok is True
+    closet = tmp_path / "bezalel" / "forge.closet.json"
+    assert not closet.exists()
+
+
+def test_write_closet_creates_wing_subdirectory(tmp_path):
+    entries = [_make_entry("memory")]
+    _write_closet(tmp_path, "timmy", "hermes", entries, dry_run=False)
+    assert (tmp_path / "timmy").is_dir()
+
+
+def test_write_closet_source_file_is_tunnel_tagged(tmp_path):
+    entries = [_make_entry("memory")]
+    _write_closet(tmp_path, "bezalel", "hermes", entries, dry_run=False)
+    closet = tmp_path / "bezalel" / "hermes.closet.json"
+    data = json.loads(closet.read_text())
+    assert data["drawers"][0]["source_file"].startswith("tunnel:")
+
+
+# ---------------------------------------------------------------------------
+# sync_peer
+# ---------------------------------------------------------------------------
+
+def _mock_get_responses(peer_url: str) -> dict:
+    """Minimal mock _get returning health, wings, and search results."""
+    def _get(url: str) -> dict:
+        if url.endswith("/health"):
+            return {"status": "ok", "palace": "/var/lib/mempalace/fleet"}
+        if url.endswith("/wings"):
+            return {"wings": ["bezalel"]}
+        if "/search" in url:
+            return {"results": [_make_entry("test memory")]}
+        return {}
+    return _get
+
+
+def test_sync_peer_writes_closets(tmp_path):
+    (tmp_path / ".gitkeep").touch()  # ensure palace dir exists
+
+    with patch("mempalace.tunnel_sync._get", side_effect=_mock_get_responses("http://peer:7771")):
+        result = sync_peer("http://peer:7771", tmp_path, n_results=10)
+
+    assert result.ok
+    assert "bezalel" in result.wings_found
+    assert result.closets_written > 0
+
+
+def test_sync_peer_dry_run_no_files(tmp_path):
+    (tmp_path / ".gitkeep").touch()
+
+    with patch("mempalace.tunnel_sync._get", side_effect=_mock_get_responses("http://peer:7771")):
+        result = sync_peer("http://peer:7771", tmp_path, n_results=10, dry_run=True)
+
+    assert result.ok
+    # No closet files should be written
+    closets = list(tmp_path.rglob("*.closet.json"))
+    assert closets == []
+
+
+def test_sync_peer_unreachable_returns_error(tmp_path):
+    import urllib.error
+    with patch("mempalace.tunnel_sync._get", side_effect=urllib.error.URLError("refused")):
+        result = sync_peer("http://unreachable:7771", tmp_path)
+
+    assert not result.ok
+    assert any("unreachable" in e or "refused" in e for e in result.errors)
+
+
+def test_sync_peer_unhealthy_returns_error(tmp_path):
+    with patch("mempalace.tunnel_sync._get", return_value={"status": "degraded"}):
+        result = sync_peer("http://peer:7771", tmp_path)
+
+    assert not result.ok
+    assert any("unhealthy" in e for e in result.errors)
+
+
+def test_sync_peer_no_wings_is_ok(tmp_path):
+    def _get(url: str) -> dict:
+        if "/health" in url:
+            return {"status": "ok"}
+        return {"wings": []}
+
+    with patch("mempalace.tunnel_sync._get", side_effect=_get):
+        result = sync_peer("http://peer:7771", tmp_path)
+
+    assert result.ok
+    assert result.closets_written == 0
+
+
+# ---------------------------------------------------------------------------
+# SyncResult.ok
+# ---------------------------------------------------------------------------
+
+def test_sync_result_ok_no_errors():
+    r = SyncResult(wings_found=["bezalel"], rooms_pulled=5, closets_written=5)
+    assert r.ok is True
+
+
+def test_sync_result_not_ok_with_errors():
+    r = SyncResult(errors=["connection refused"])
+    assert r.ok is False