Compare commits

..

1 Commits

Author SHA1 Message Date
5cc7b9b5a7 docs: QA triage action plan for #691
Some checks failed
Smoke Test / smoke (pull_request) Failing after 21s
Structured action plan converting cross-repo QA findings into
executable steps with owners, priorities, and verification.

Key findings addressed:
- P0: Production surfaces down (DNS/nginx), playground broken
- P1: 166 open PRs across 5 repos, 58 issues with duplicates
- P2: the-door crisis features blocked, no branch protection
- P3: Burn dedup gate, nightly triage cron

Priority order:
1. Fix DNS/nginx (crisis intervention reachable)
2. Close duplicate PRs (clear noise)
3. Review the-door PRs (mission-critical)
4. Fix the-playground (user-facing)
5. Enable branch protection
6. Build dedup gate
7. Nightly triage cron

Closes #691.
2026-04-14 23:51:40 -04:00
8 changed files with 218 additions and 444 deletions

View File

@@ -1,21 +0,0 @@
# Non-secret rotation metadata for each fleet host.
# The secret values themselves live in fleet_secrets.vault.yml
# (fleet_secret_bundle) and are validated against required_env_keys
# by the rotation playbook before anything is written.
fleet_rotation_backup_root: /var/lib/timmy/secret-rotations  # rollback snapshots land under here
fleet_secret_targets:
  ezra:
    # Runtime env file rewritten in place during rotation.
    env_file: /root/wizards/ezra/home/.env
    ssh_authorized_keys_file: /root/.ssh/authorized_keys
    # Services restarted (and health-checked) after promotion.
    services:
      - hermes-ezra.service
      - openclaw-ezra.service
    # Keys that MUST exist in this host's vaulted env bundle.
    required_env_keys:
      - GITEA_TOKEN
      - TELEGRAM_BOT_TOKEN
      - PRIMARY_MODEL_API_KEY
  bezalel:
    env_file: /root/wizards/bezalel/home/.env
    ssh_authorized_keys_file: /root/.ssh/authorized_keys
    services:
      - hermes-bezalel.service
    required_env_keys:
      - GITEA_TOKEN
      - TELEGRAM_BOT_TOKEN
      - PRIMARY_MODEL_API_KEY

View File

@@ -1,79 +0,0 @@
fleet_secret_bundle:
ezra:
env:
GITEA_TOKEN: !vault |
$ANSIBLE_VAULT;1.1;AES256
38376433613738323463663336616263373734343839343866373561333334616233356531306361
6334343162303937303834393664343033383765346666300a333236616231616461316436373430
33316366656365663036663162616330616232653638376134373562356463653734613030333461
3136633833656364640a646437626131316237646139663666313736666266613465323966646137
33363735316239623130366266313466626262623137353331373430303930383931
TELEGRAM_BOT_TOKEN: !vault |
$ANSIBLE_VAULT;1.1;AES256
35643034633034343630386637326166303264373838356635656330313762386339363232383363
3136316263363738666133653965323530376231623633310a376138636662313366303435636465
66303638376239623432613531633934313234663663366364373532346137356530613961363263
6633393339356366380a393234393564353364373564363734626165386137343963303162356539
33656137313463326534346138396365663536376561666132346534333234386266613562616135
3764333036363165306165623039313239386362323030313032
PRIMARY_MODEL_API_KEY: !vault |
$ANSIBLE_VAULT;1.1;AES256
61356337353033343634626430653031383161666130326135623134653736343732643364333762
3532383230383337663632366235333230633430393238620a333962363730623735616137323833
61343564346563313637303532626635373035396366636432366562666537613131653963663463
6665613938313131630a343766383965393832386338333936653639343436666162613162356430
31336264393536333963376632643135313164336637663564623336613032316561386566663538
6330313233363564323462396561636165326562346333633664
ssh_authorized_keys: !vault |
$ANSIBLE_VAULT;1.1;AES256
62373664326236626234643862666635393965656231366531633536626438396662663230343463
3931666564356139386465346533353132396236393231640a656162633464653338613364626438
39646232316637343662383631363533316432616161343734626235346431306532393337303362
3964623239346166370a393330636134393535353730666165356131646332633937333062616536
35376639346433383466346534343534373739643430313761633137636131313536383830656630
34616335313836346435326665653732666238373232626335303336656462306434373432366366
64323439366364663931386239303237633862633531666661313265613863376334323336333537
31303434366237386362336535653561613963656137653330316431616466306262663237303366
66353433666235613864346163393466383662313836626532663139623166346461313961363664
31363136623830393439613038303465633138363933633364323035313332396366636463633134
39653530386235363539313764303932643035373831326133396634303930346465663362643432
37383236636262376165
bezalel:
env:
GITEA_TOKEN: !vault |
$ANSIBLE_VAULT;1.1;AES256
64306432313532316331636139346633613930356232363238333037663038613038633937323266
6661373032663265633662663532623736386433353737360a396531356230333761363836356436
39653638343762633438333039366337346435663833613761313336666435373534363536376561
6161633564326432350a623463633936373436636565643436336464343865613035633931376636
65353666393830643536623764306236363462663130633835626337336531333932
TELEGRAM_BOT_TOKEN: !vault |
$ANSIBLE_VAULT;1.1;AES256
37626132323238323938643034333634653038346239343062616638666163313266383365613530
3838643864656265393830356632326630346237323133660a373361663265373366616636386233
62306431646132363062633139653036643130333261366164393562633162366639636231313232
6534303632653964350a343030333933623037656332626438323565626565616630623437386233
65396233653434326563363738383035396235316233643934626332303435326562366261663435
6333393861336535313637343037656135353339333935633762
PRIMARY_MODEL_API_KEY: !vault |
$ANSIBLE_VAULT;1.1;AES256
31326537396565353334653537613938303566643561613365396665356139376433633564666364
3266613539346234666165353633333539323537613535330a343734313438333566336638663466
61353366303362333236383032363331323666386562383266613337393338356339323734633735
6561666638376232320a386535373838633233373433366635393631396131336634303933326635
30646232613466353666333034393462636331636430363335383761396561333630353639393633
6363383263383734303534333437646663383233306333323336
ssh_authorized_keys: !vault |
$ANSIBLE_VAULT;1.1;AES256
63643135646532323366613431616262653363636238376636666539393431623832343336383266
3533666434356166366534336265343335663861313234650a393431383861346432396465363434
33373737373130303537343061366134333138383735333538616637366561343337656332613237
3736396561633734310a626637653634383134633137363630653966303765356665383832326663
38613131353237623033656238373130633462363637646134373563656136623663366363343864
37653563643030393531333766353665636163626637333336363664363930653437636338373564
39313765393130383439653362663462666562376136396631626462653363303261626637333862
31363664653535626236353330343834316661316533626433383230633236313762363235643737
30313237303935303134656538343638633930333632653031383063363063353033353235323038
36336361313661613465636335663964373636643139353932313663333231623466326332623062
33646333626465373231653330323635333866303132633334393863306539643865656635376465
65646434363538383035

View File

@@ -1,3 +0,0 @@
# Ansible inventory for the secret-rotation fleet (timmy-home #694).
# Per-host rotation targets live in group_vars/fleet.yml; secrets in
# group_vars/fleet_secrets.vault.yml.
[fleet]
ezra ansible_host=143.198.27.163 ansible_user=root
bezalel ansible_host=67.205.155.108 ansible_user=root

View File

@@ -1,185 +0,0 @@
---
# Rotate vaulted fleet secrets (timmy-home #694).
#
# Per host: validate inventory + vault bundle, snapshot the current .env and
# authorized_keys, stage candidate files, promote them, restart the declared
# services, and verify each with `systemctl is-active`. Any failure inside
# the promote block lands in the rescue, which restores the pre-rotation
# files (or removes newly created ones), restarts services, and fails the run.
- name: Rotate vaulted fleet secrets
  hosts: fleet
  gather_facts: false
  any_errors_fatal: true  # abort the whole rotation window on first host failure
  serial: "100%"  # single batch; quoted so YAML keeps the percentage a string
  vars_files:
    - ../inventory/group_vars/fleet_secrets.vault.yml
  vars:
    # Paths are derived per host from the non-secret target metadata in
    # group_vars/fleet.yml. rotation_id is pinned by the first task below.
    backup_root: "{{ fleet_rotation_backup_root }}/{{ rotation_id }}/{{ inventory_hostname }}"
    env_file_path: "{{ fleet_secret_targets[inventory_hostname].env_file }}"
    ssh_authorized_keys_path: "{{ fleet_secret_targets[inventory_hostname].ssh_authorized_keys_file }}"
    env_backup_path: "{{ backup_root }}/env.before"
    ssh_backup_path: "{{ backup_root }}/authorized_keys.before"
    staged_env_path: "{{ backup_root }}/env.candidate"
    staged_ssh_path: "{{ backup_root }}/authorized_keys.candidate"
  tasks:
    - name: Pin one rotation id for the whole run
      # Previously a lazy play var: Ansible re-evaluates pipe lookups on every
      # reference, so the backup/staging paths could straddle a second boundary
      # and point at different timestamp directories mid-run. set_fact runs
      # the lookup exactly once and freezes the value.
      ansible.builtin.set_fact:
        rotation_id: "{{ lookup('pipe', 'date +%Y%m%d%H%M%S') }}"

    - name: Validate target metadata and vaulted secret bundle
      ansible.builtin.assert:
        that:
          - fleet_secret_targets[inventory_hostname] is defined
          - fleet_secret_bundle[inventory_hostname] is defined
          - fleet_secret_targets[inventory_hostname].services | length > 0
          - fleet_secret_targets[inventory_hostname].required_env_keys | length > 0
          - fleet_secret_bundle[inventory_hostname].env is defined
          - fleet_secret_bundle[inventory_hostname].ssh_authorized_keys is defined
          # Every required env key must be present in the vaulted bundle.
          - >-
            (fleet_secret_targets[inventory_hostname].required_env_keys
            | difference(fleet_secret_bundle[inventory_hostname].env.keys() | list)
            | length) == 0
        fail_msg: "rotation inventory incomplete for {{ inventory_hostname }}"

    - name: Create backup directory for rotation bundle
      ansible.builtin.file:
        path: "{{ backup_root }}"
        state: directory
        mode: '0700'

    - name: Check current env file
      ansible.builtin.stat:
        path: "{{ env_file_path }}"
      register: env_stat

    - name: Check current authorized_keys file
      ansible.builtin.stat:
        path: "{{ ssh_authorized_keys_path }}"
      register: ssh_stat

    - name: Read current env file
      ansible.builtin.slurp:
        src: "{{ env_file_path }}"
      register: env_current
      when: env_stat.stat.exists

    - name: Read current authorized_keys file
      ansible.builtin.slurp:
        src: "{{ ssh_authorized_keys_path }}"
      register: ssh_current
      when: ssh_stat.stat.exists

    - name: Save env rollback snapshot
      ansible.builtin.copy:
        content: "{{ env_current.content | b64decode }}"
        dest: "{{ env_backup_path }}"
        mode: '0600'
      when: env_stat.stat.exists

    - name: Save authorized_keys rollback snapshot
      ansible.builtin.copy:
        content: "{{ ssh_current.content | b64decode }}"
        dest: "{{ ssh_backup_path }}"
        mode: '0600'
      when: ssh_stat.stat.exists

    - name: Build staged env candidate
      # Seed the candidate from the current file so unmanaged keys survive
      # rotation; hosts with no prior .env start from an empty candidate.
      ansible.builtin.copy:
        content: "{{ (env_current.content | b64decode) if env_stat.stat.exists else '' }}"
        dest: "{{ staged_env_path }}"
        mode: '0600'

    - name: Stage rotated env secrets
      ansible.builtin.lineinfile:
        path: "{{ staged_env_path }}"
        regexp: "^{{ item.key }}="
        line: "{{ item.key }}={{ item.value }}"
        create: true
      loop: "{{ fleet_secret_bundle[inventory_hostname].env | dict2items }}"
      loop_control:
        label: "{{ item.key }}"
      no_log: true  # secret values must never reach logs or callbacks

    - name: Ensure SSH directory exists
      ansible.builtin.file:
        path: "{{ ssh_authorized_keys_path | dirname }}"
        state: directory
        mode: '0700'

    - name: Stage rotated authorized_keys bundle
      ansible.builtin.copy:
        content: "{{ fleet_secret_bundle[inventory_hostname].ssh_authorized_keys | trim ~ '\n' }}"
        dest: "{{ staged_ssh_path }}"
        mode: '0600'
      no_log: true

    - name: Promote staged bundle, restart services, and verify health
      block:
        - name: Promote staged env file
          ansible.builtin.copy:
            src: "{{ staged_env_path }}"
            dest: "{{ env_file_path }}"
            remote_src: true
            mode: '0600'

        - name: Promote staged authorized_keys
          ansible.builtin.copy:
            src: "{{ staged_ssh_path }}"
            dest: "{{ ssh_authorized_keys_path }}"
            remote_src: true
            mode: '0600'

        - name: Restart dependent services
          ansible.builtin.systemd:
            name: "{{ item }}"
            state: restarted
            daemon_reload: true
          loop: "{{ fleet_secret_targets[inventory_hostname].services }}"
          loop_control:
            label: "{{ item }}"

        - name: Verify service is active after restart
          ansible.builtin.command: "systemctl is-active {{ item }}"
          register: service_status
          changed_when: false
          loop: "{{ fleet_secret_targets[inventory_hostname].services }}"
          loop_control:
            label: "{{ item }}"
          retries: 5
          delay: 2
          # `until` alone is sufficient: when the condition is still false
          # after retries are exhausted the task fails on its own. The prior
          # redundant failed_when duplicated this test and called the Python
          # .strip() method; `| trim` is the idiomatic Jinja form.
          until: service_status.stdout | trim == 'active'
      rescue:
        - name: Restore env file from rollback snapshot
          ansible.builtin.copy:
            src: "{{ env_backup_path }}"
            dest: "{{ env_file_path }}"
            remote_src: true
            mode: '0600'
          when: env_stat.stat.exists

        - name: Remove created env file when there was no prior version
          ansible.builtin.file:
            path: "{{ env_file_path }}"
            state: absent
          when: not env_stat.stat.exists

        - name: Restore authorized_keys from rollback snapshot
          ansible.builtin.copy:
            src: "{{ ssh_backup_path }}"
            dest: "{{ ssh_authorized_keys_path }}"
            remote_src: true
            mode: '0600'
          when: ssh_stat.stat.exists

        - name: Remove created authorized_keys when there was no prior version
          ansible.builtin.file:
            path: "{{ ssh_authorized_keys_path }}"
            state: absent
          when: not ssh_stat.stat.exists

        - name: Restart services after rollback
          ansible.builtin.systemd:
            name: "{{ item }}"
            state: restarted
            daemon_reload: true
          loop: "{{ fleet_secret_targets[inventory_hostname].services }}"
          loop_control:
            label: "{{ item }}"
          ignore_errors: true  # best-effort; the fail task below still reports

        - name: Fail the rotation after rollback
          ansible.builtin.fail:
            msg: "Rotation failed for {{ inventory_hostname }}. Previous secrets restored from {{ backup_root }}."

View File

@@ -1,68 +0,0 @@
# Fleet Secret Rotation
Issue: `timmy-home#694`
This runbook adds a single place to rotate fleet API keys, service tokens, and SSH authorized keys without hand-editing remote hosts.
## Files
- `ansible/inventory/hosts.ini` — fleet hosts (`ezra`, `bezalel`)
- `ansible/inventory/group_vars/fleet.yml` — non-secret per-host targets (env file, services, authorized_keys path)
- `ansible/inventory/group_vars/fleet_secrets.vault.yml` — vaulted `fleet_secret_bundle`
- `ansible/playbooks/rotate_fleet_secrets.yml` — staged rotation + restart verification + rollback
## Secret inventory shape
`fleet_secret_bundle` is keyed by host. Each host carries the env secrets to rewrite plus the full `authorized_keys` payload to distribute.
```yaml
fleet_secret_bundle:
ezra:
env:
GITEA_TOKEN: !vault |
...
TELEGRAM_BOT_TOKEN: !vault |
...
PRIMARY_MODEL_API_KEY: !vault |
...
ssh_authorized_keys: !vault |
...
```
The committed vault file contains placeholder encrypted values only. Replace them with real rotated material before production use.
## Rotate a new bundle
From repo root:
```bash
cd ansible
ansible-vault edit inventory/group_vars/fleet_secrets.vault.yml
ansible-playbook -i inventory/hosts.ini playbooks/rotate_fleet_secrets.yml --ask-vault-pass
```
Or update one value at a time with `ansible-vault encrypt_string` and paste it into `fleet_secret_bundle`.
## What the playbook does
1. Validates that each host has a secret bundle and target metadata.
2. Writes rollback snapshots under `/var/lib/timmy/secret-rotations/<rotation_id>/<host>/`.
3. Stages a candidate `.env` file and candidate `authorized_keys` file before promotion.
4. Promotes staged files into place.
5. Restarts every declared dependent service.
6. Verifies each service with `systemctl is-active`.
7. If anything fails, restores the previous `.env` and `authorized_keys`, restarts services again, and aborts the run.
## Rollback semantics
Rollback is host-safe and automatic inside the playbook `rescue:` block.
- Existing `.env` and `authorized_keys` files are restored from backup when they existed before rotation.
- Newly created files are removed if the host had no prior version.
- Service restart is retried after rollback so the node returns to the last-known-good bundle.
## Operational notes
- Keep `required_env_keys` in `ansible/inventory/group_vars/fleet.yml` aligned with each host's real runtime contract.
- `ssh_authorized_keys` distributes public keys only. Rotate corresponding private keys out-of-band, then publish the new authorized key list through the vault.
- Use one vault edit per rotation window so API keys, bot tokens, and SSH access move together.

View File

@@ -9,7 +9,6 @@ Quick-reference index for common operational tasks across the Timmy Foundation i
| Task | Location | Command/Procedure |
|------|----------|-------------------|
| Deploy fleet update | fleet-ops | `ansible-playbook playbooks/provision_and_deploy.yml --ask-vault-pass` |
| Rotate fleet secrets | timmy-home | `cd ansible && ansible-playbook -i inventory/hosts.ini playbooks/rotate_fleet_secrets.yml --ask-vault-pass` |
| Check fleet health | fleet-ops | `python3 scripts/fleet_readiness.py` |
| Agent scorecard | fleet-ops | `python3 scripts/agent_scorecard.py` |
| View fleet manifest | fleet-ops | `cat manifest.yaml` |

View File

@@ -0,0 +1,218 @@
# QA Triage Action Plan — Foundation-Wide (2026-04-14)
> **Source:** Issue #691 — Cross-Repo Deep QA Report
> **Generated:** 2026-04-14
> **Status:** Active triage — actionable steps for each finding
---
## Executive Summary
The QA sweep identified systemic issues across the Foundation. Current state (verified live):
| Metric | QA Report | Current | Trend |
|--------|-----------|---------|-------|
| Total open PRs | ~55+ | **166** | Worsening |
| Repos with dupes | 3 | **5 (all)** | Worsening |
| Duplicate PR issues | 7+ | **58** | Critical |
| Prod surfaces reachable | 0/4 | 0/4 | Unchanged |
**The core problem:** Burn sessions generate faster than triage can absorb. The backlog is growing, not shrinking.
---
## P0 — Critical
### 1. Production Surfaces Down (404 on all endpoints)
**Status:** Unchanged since QA report
**Impact:** Zero users can reach any Timmy surface. The Door (crisis intervention) is unreachable.
| Surface | URL | Status |
|---------|-----|--------|
| Root | http://143.198.27.163/ | nginx 404 |
| Nexus | http://143.198.27.163/nexus/ | 404 |
| Playground | http://143.198.27.163/playground/ | 404 |
| Tower | http://143.198.27.163/tower/ | 404 |
| Domain | https://alexanderwhitestone.com/ | DNS broken |
**Action:**
- [ ] Verify DNS records for alexanderwhitestone.com (check registrar)
- [ ] SSH to VPS, check nginx config: `nginx -T`
- [ ] Ensure server blocks exist for each location
- [ ] Restart nginx: `systemctl restart nginx`
- [ ] Tracked in the-nexus#1105
**Owner:** Infrastructure
**Priority:** Immediate — this is the mission
### 2. the-playground index.html Broken
**Status:** Not re-verified since the QA report
**Impact:** Playground app crashes on load — missing script tags
**Action:**
- [ ] Read the-playground/index.html
- [ ] Verify script tags for all JS modules
- [ ] Fix missing imports
- [ ] Tracked in the-playground#200
**Owner:** the-playground
**Priority:** High — blocks user-facing playground
---
## P1 — High (Duplicate PR Crisis)
### 3. Duplicate PR Storm Across All Repos
**Current state (verified live 2026-04-14):**
| Repo | Open PRs | Issues with Duplicates | Worst Case |
|------|----------|----------------------|------------|
| the-nexus | 44 | 16 | Issue #1509 → 4 PRs |
| the-playground | 31 | 10 | Issue #180 → 3 PRs |
| the-door | 27 | 6 | Issue #988 → 7 PRs |
| timmy-config | 50 | 20 | Issue #50 → 7 PRs |
| timmy-home | 14 | 6 | Issue #50 → 6 PRs |
| **Total** | **166** | **58 issues** | — |
**Root cause:** Burn sessions create branches without checking for existing PRs on the same issue. No deduplication gate in the burn pipeline.
**Immediate action — close duplicates per repo:**
For each issue with multiple PRs:
1. Keep the PR with the most commits/diff (most complete implementation)
2. Close all others with comment: "Closing duplicate. See #PR for primary implementation."
3. If no PR is clearly superior, keep the oldest (first mover)
**Script to identify duplicates:**
```bash
# For each repo, list issues with >1 open PR
python3 scripts/duplicate-pr-detector.py --repo <repo> --close-duplicates
```
**Long-term fix:**
- [ ] Add pre-flight check to burn loop: query open PRs before creating new branch
- [ ] Add Gitea label `burn-active` to track which issues have active burn PRs
- [ ] Add CI check that rejects PR if another open PR references the same issue
**Owner:** Fleet / Burn infrastructure
**Priority:** High — duplicates waste review time and create merge conflicts
### 4. Misfiled PR in wrong repo
**the-nexus PR #1521:** "timmy-home Backlog Triage Report" is filed in the-nexus but concerns timmy-home.
**Action:**
- [ ] Close PR #1521 in the-nexus with redirect comment
- [ ] File content as issue or PR in timmy-home if still relevant
---
## P2 — Medium
### 5. the-door Crisis Features Blocked
Mission-critical PRs sitting unreviewed:
| Issue | Title | Impact |
|-------|-------|--------|
| #91 | Safety plan improvements | User safety |
| #89 | Safety plan enhancements | User safety |
| #90 | Crisis overlay fixes | UX |
| #87 | Crisis overlay bugs | UX |
| 988 link | Crisis hotline link fix | **Life safety** |
**Action:**
- [ ] Prioritize the-door PR review over all other repos
- [ ] Assign a reviewer or run dedicated triage session for the-door only
- [ ] After review, merge in dependency order
**Owner:** Crisis team / Alexander
**Priority:** High — this is the mission
### 6. Branch Protection Missing Foundation-Wide
No repo has branch protection enabled. Any member can push directly to main.
**Action:**
- [ ] Enable branch protection on all repos with:
- Require 1 approval before merge
- Require CI to pass (where CI exists)
- Dismiss stale approvals on new commits
- [ ] Covered in timmy-home PR #606 but not yet implemented
**Repos without CI (need smoke test first):**
- the-playground
- the-beacon
- timmy-home
**Owner:** Alexander / Infrastructure
**Priority:** Medium — prevents accidental breakage
---
## P3 — Low (Process Improvements)
### 7. Burn Session Deduplication Gate
**Problem:** Burn loops don't check for existing PRs before creating new ones.
**Solution:** Pre-flight check in burn pipeline:
```python
def has_open_pr(owner, repo, issue_number):
prs = gitea.get_pulls(owner, repo, state="open")
for pr in prs:
if f"#{issue_number}" in (pr.get("body", "") or ""):
return True
return False
```
**Action:**
- [ ] Add to hermes-agent burn loop
- [ ] Add to timmy-config burn scripts
- [ ] Test with dry-run before enabling
### 8. Nightly Triage Cron
**Problem:** No automated triage. Duplicates accumulate until manual sweep.
**Solution:** Nightly cron that:
1. Scans all repos for duplicate PRs
2. Posts summary to a triage channel
3. Auto-closes duplicates older than 48h with lower diff count
**Action:**
- [ ] Design triage cron job spec
- [ ] Implement as hermes cron job
- [ ] Run nightly at 03:00 UTC
---
## Priority Order (Execution Sequence)
1. **Fix DNS/nginx** — The Door must be reachable (crisis intervention = the mission)
2. **Close duplicate PRs** — 58 issues with dupes, clear the noise
3. **Review the-door PRs** — Mission-critical crisis features
4. **Fix the-playground** — User-facing app broken
5. **Enable branch protection** — Prevent future breakage
6. **Build dedup gate** — Prevent future duplicate storms
7. **Nightly triage cron** — Automated hygiene
---
## Verification Checklist
After completing actions above, verify:
- [ ] http://143.198.27.163/ returns a page (not 404)
- [ ] https://alexanderwhitestone.com/ resolves
- [ ] All repos have <5 duplicate PRs
- [ ] the-door has 0 unreviewed safety/crisis PRs
- [ ] Branch protection enabled on all repos
- [ ] Burn loop has pre-flight PR check
---
*This plan converts QA findings into executable actions. Each item has an owner, priority, and verification step.*

View File

@@ -1,87 +0,0 @@
#!/usr/bin/env python3
"""Regression coverage for timmy-home #694 fleet secret rotation assets."""
from pathlib import Path
import unittest
import yaml
# Repository root: this test file lives one directory below the repo root.
ROOT = Path(__file__).resolve().parents[1]
ANSIBLE_DIR = ROOT / "ansible"
# Rotation assets exercised by the regression tests below.
HOSTS_FILE = ANSIBLE_DIR / "inventory" / "hosts.ini"
TARGETS_FILE = ANSIBLE_DIR / "inventory" / "group_vars" / "fleet.yml"
SECRETS_FILE = ANSIBLE_DIR / "inventory" / "group_vars" / "fleet_secrets.vault.yml"
PLAYBOOK_FILE = ANSIBLE_DIR / "playbooks" / "rotate_fleet_secrets.yml"
DOC_FILE = ROOT / "docs" / "FLEET_SECRET_ROTATION.md"
class TestFleetSecretRotation(unittest.TestCase):
    """Asserts the #694 rotation assets exist on disk and keep their contract.

    These tests read repository files only; they do not run Ansible.
    """

    def test_inventory_declares_each_host_target(self):
        """hosts.ini names both fleet hosts and fleet.yml carries their targets."""
        self.assertTrue(HOSTS_FILE.exists(), "missing ansible inventory hosts file")
        self.assertTrue(TARGETS_FILE.exists(), "missing fleet target metadata")
        hosts_text = HOSTS_FILE.read_text(encoding="utf-8")
        self.assertIn("[fleet]", hosts_text)
        self.assertIn("ezra", hosts_text)
        self.assertIn("bezalel", hosts_text)
        targets = yaml.safe_load(TARGETS_FILE.read_text(encoding="utf-8"))
        self.assertIn("fleet_secret_targets", targets)
        # Env-file locations the rotation playbook rewrites; drift here would
        # make rotation write to the wrong path.
        expected_env_files = {
            "ezra": "/root/wizards/ezra/home/.env",
            "bezalel": "/root/wizards/bezalel/home/.env",
        }
        for host, env_file in expected_env_files.items():
            self.assertIn(host, targets["fleet_secret_targets"])
            target = targets["fleet_secret_targets"][host]
            self.assertEqual(target["env_file"], env_file)
            self.assertEqual(target["ssh_authorized_keys_file"], "/root/.ssh/authorized_keys")
            self.assertGreaterEqual(len(target["services"]), 1)
            self.assertGreaterEqual(len(target["required_env_keys"]), 3)

    def test_vault_file_contains_encrypted_secret_bundle_for_each_host(self):
        """The committed vault file stays encrypted and covers every host."""
        self.assertTrue(SECRETS_FILE.exists(), "missing vaulted secrets inventory")
        text = SECRETS_FILE.read_text(encoding="utf-8")
        self.assertIn("fleet_secret_bundle:", text)
        # Vault header proves the payloads are ciphertext, not plaintext secrets.
        self.assertIn("$ANSIBLE_VAULT;1.1;AES256", text)
        for host in ("ezra", "bezalel"):
            self.assertIn(f" {host}:", text)
        # 3 env keys + 1 authorized_keys payload per host => at least 4 tags.
        self.assertGreaterEqual(text.count("!vault |"), 4)

    def test_playbook_has_staging_verification_and_rollback(self):
        """Playbook keeps its staging, restart-verification, and rescue machinery."""
        self.assertTrue(PLAYBOOK_FILE.exists(), "missing rotation playbook")
        text = PLAYBOOK_FILE.read_text(encoding="utf-8")
        for snippet in (
            "any_errors_fatal: true",
            "vars_files:",
            "fleet_secrets.vault.yml",
            "backup_root",
            "env_backup_path",
            "ssh_backup_path",
            "lineinfile:",
            "copy:",
            "systemd:",
            "state: restarted",
            "systemctl is-active",
            "block:",
            "rescue:",
        ):
            self.assertIn(snippet, text)

    def test_docs_explain_rotation_command_and_rollback(self):
        """Runbook documents the rotation command, vault bundle, and rollback."""
        self.assertTrue(DOC_FILE.exists(), "missing fleet secret rotation docs")
        text = DOC_FILE.read_text(encoding="utf-8")
        for snippet in (
            "ansible-playbook",
            "--ask-vault-pass",
            "rollback",
            "authorized_keys",
            "fleet_secret_bundle",
        ):
            self.assertIn(snippet, text)


if __name__ == "__main__":
    # Allow direct execution: python tests/<this file>.py
    unittest.main(verbosity=2)