diff --git a/scripts/loop_guard.py b/scripts/loop_guard.py
index 0da2221..fb42817 100644
--- a/scripts/loop_guard.py
+++ b/scripts/loop_guard.py
@@ -202,7 +202,11 @@ def load_queue() -> list[dict]:
         # Persist the cleaned queue so stale entries don't recur
         _save_cleaned_queue(data, open_numbers)
         return ready
-    except (json.JSONDecodeError, OSError):
+    except json.JSONDecodeError as exc:
+        print(f"[loop-guard] WARNING: Corrupt queue.json ({exc}) — returning empty queue")
+        return []
+    except OSError as exc:
+        print(f"[loop-guard] WARNING: Cannot read queue.json ({exc}) — returning empty queue")
         return []
 
 
diff --git a/scripts/triage_score.py b/scripts/triage_score.py
index 1f47517..f1d9070 100644
--- a/scripts/triage_score.py
+++ b/scripts/triage_score.py
@@ -41,6 +41,7 @@ REPO_SLUG = os.environ.get("REPO_SLUG", "rockachopa/Timmy-time-dashboard")
 TOKEN_FILE = Path.home() / ".hermes" / "gitea_token"
 REPO_ROOT = Path(__file__).resolve().parent.parent
 QUEUE_FILE = REPO_ROOT / ".loop" / "queue.json"
+QUEUE_BACKUP_FILE = REPO_ROOT / ".loop" / "queue.json.bak"
 RETRO_FILE = REPO_ROOT / ".loop" / "retro" / "triage.jsonl"
 QUARANTINE_FILE = REPO_ROOT / ".loop" / "quarantine.json"
 CYCLE_RETRO_FILE = REPO_ROOT / ".loop" / "retro" / "cycles.jsonl"
@@ -342,9 +343,38 @@ def run_triage() -> list[dict]:
     ready = [s for s in scored if s["ready"]]
     not_ready = [s for s in scored if not s["ready"]]
 
+    # Save backup before writing (if current file exists and is valid)
+    if QUEUE_FILE.exists():
+        try:
+            json.loads(current := QUEUE_FILE.read_text())  # Validate; back up the same bytes we validated
+            QUEUE_BACKUP_FILE.write_text(current)
+        except (json.JSONDecodeError, OSError):
+            pass  # Current file is corrupt, don't overwrite backup
+
+    # Write new queue file
     QUEUE_FILE.parent.mkdir(parents=True, exist_ok=True)
     QUEUE_FILE.write_text(json.dumps(ready, indent=2) + "\n")
 
+    # Validate the write by re-reading and parsing
+    try:
+        json.loads(QUEUE_FILE.read_text())
+    except (json.JSONDecodeError, OSError) as exc:
+        print(f"[triage] ERROR: queue.json validation failed: {exc}", file=sys.stderr)
+        # Restore from backup if available
+        if QUEUE_BACKUP_FILE.exists():
+            try:
+                backup_data = QUEUE_BACKUP_FILE.read_text()
+                json.loads(backup_data)  # Validate backup
+                QUEUE_FILE.write_text(backup_data)
+                print("[triage] Restored queue.json from backup")
+            except (json.JSONDecodeError, OSError) as restore_exc:
+                print(f"[triage] ERROR: Backup restore failed: {restore_exc}", file=sys.stderr)
+                # Write empty list as last resort
+                QUEUE_FILE.write_text("[]\n")
+        else:
+            # No backup, write empty list
+            QUEUE_FILE.write_text("[]\n")
+
     # Write retro entry
     retro_entry = {
         "timestamp": datetime.now(timezone.utc).isoformat(),
diff --git a/tests/loop/test_loop_guard_corrupt_queue.py b/tests/loop/test_loop_guard_corrupt_queue.py
new file mode 100644
index 0000000..7047234
--- /dev/null
+++ b/tests/loop/test_loop_guard_corrupt_queue.py
@@ -0,0 +1,97 @@
+"""Tests for load_queue corrupt JSON handling in loop_guard.py."""
+
+from __future__ import annotations
+
+import json
+from pathlib import Path
+
+import pytest
+import scripts.loop_guard as lg
+
+
+@pytest.fixture(autouse=True)
+def _isolate(tmp_path, monkeypatch):
+    """Redirect loop_guard paths to tmp_path for isolation."""
+    monkeypatch.setattr(lg, "QUEUE_FILE", tmp_path / "queue.json")
+    monkeypatch.setattr(lg, "IDLE_STATE_FILE", tmp_path / "idle_state.json")
+    monkeypatch.setattr(lg, "CYCLE_RESULT_FILE", tmp_path / "cycle_result.json")
+    monkeypatch.setattr(lg, "GITEA_API", "http://test:3000/api/v1")
+    monkeypatch.setattr(lg, "REPO_SLUG", "owner/repo")
+
+
+def test_load_queue_missing_file(tmp_path):
+    """Missing queue file returns empty list."""
+    result = lg.load_queue()
+    assert result == []
+
+
+def test_load_queue_valid_data(tmp_path):
+    """Valid queue.json returns ready items."""
+    data = [
+        {"issue": 1, "title": "Ready issue", "ready": True},
+        {"issue": 2, "title": "Not ready", "ready": False},
+    ]
+    lg.QUEUE_FILE.parent.mkdir(parents=True, exist_ok=True)
+    lg.QUEUE_FILE.write_text(json.dumps(data, indent=2))
+
+    result = lg.load_queue()
+    assert len(result) == 1
+    assert result[0]["issue"] == 1
+
+
+def test_load_queue_corrupt_json_logs_warning(tmp_path, capsys):
+    """Corrupt queue.json returns empty list and logs warning."""
+    lg.QUEUE_FILE.parent.mkdir(parents=True, exist_ok=True)
+    lg.QUEUE_FILE.write_text("not valid json {{{")
+
+    result = lg.load_queue()
+    assert result == []
+
+    captured = capsys.readouterr()
+    assert "WARNING" in captured.out
+    assert "Corrupt queue.json" in captured.out
+
+
+def test_load_queue_not_a_list(tmp_path):
+    """Queue.json that is not a list returns empty list."""
+    lg.QUEUE_FILE.parent.mkdir(parents=True, exist_ok=True)
+    lg.QUEUE_FILE.write_text(json.dumps({"not": "a list"}))
+
+    result = lg.load_queue()
+    assert result == []
+
+
+def test_load_queue_no_ready_items(tmp_path):
+    """Queue with no ready items returns empty list."""
+    data = [
+        {"issue": 1, "title": "Not ready 1", "ready": False},
+        {"issue": 2, "title": "Not ready 2", "ready": False},
+    ]
+    lg.QUEUE_FILE.parent.mkdir(parents=True, exist_ok=True)
+    lg.QUEUE_FILE.write_text(json.dumps(data, indent=2))
+
+    result = lg.load_queue()
+    assert result == []
+
+
+def test_load_queue_oserror_logs_warning(tmp_path, monkeypatch, capsys):
+    """OSError when reading queue.json returns empty list and logs warning."""
+    lg.QUEUE_FILE.parent.mkdir(parents=True, exist_ok=True)
+    lg.QUEUE_FILE.write_text("[]")
+
+    # Mock Path.read_text to raise OSError
+    original_read_text = Path.read_text
+
+    def mock_read_text(self, *args, **kwargs):
+        if self.name == "queue.json":
+            raise OSError("Permission denied")
+        return original_read_text(self, *args, **kwargs)
+
+    monkeypatch.setattr(Path, "read_text", mock_read_text)
+
+    result = lg.load_queue()
+    assert result == []
+
+    captured = capsys.readouterr()
+    assert "WARNING" in captured.out
+    assert "Cannot read queue.json" in captured.out
diff --git a/tests/scripts/test_triage_score_validation.py b/tests/scripts/test_triage_score_validation.py
new file mode 100644
index 0000000..882ac43
--- /dev/null
+++ b/tests/scripts/test_triage_score_validation.py
@@ -0,0 +1,159 @@
+"""Tests for queue.json validation and backup in triage_score.py."""
+
+from __future__ import annotations
+
+import json
+
+import pytest
+import scripts.triage_score as ts
+
+
+@pytest.fixture(autouse=True)
+def _isolate(tmp_path, monkeypatch):
+    """Redirect triage_score paths to tmp_path for isolation."""
+    monkeypatch.setattr(ts, "QUEUE_FILE", tmp_path / "queue.json")
+    monkeypatch.setattr(ts, "QUEUE_BACKUP_FILE", tmp_path / "queue.json.bak")
+    monkeypatch.setattr(ts, "RETRO_FILE", tmp_path / "retro" / "triage.jsonl")
+    monkeypatch.setattr(ts, "QUARANTINE_FILE", tmp_path / "quarantine.json")
+    monkeypatch.setattr(ts, "CYCLE_RETRO_FILE", tmp_path / "retro" / "cycles.jsonl")
+
+
+def test_backup_created_on_write(tmp_path):
+    """When writing queue.json, a backup should be created from previous valid file."""
+    # Create initial valid queue file
+    initial_data = [{"issue": 1, "title": "Test", "ready": True}]
+    ts.QUEUE_FILE.parent.mkdir(parents=True, exist_ok=True)
+    ts.QUEUE_FILE.write_text(json.dumps(initial_data))
+
+    # Manually run the backup logic as run_triage would (backup precedes the write)
+    if ts.QUEUE_FILE.exists():
+        try:
+            json.loads(ts.QUEUE_FILE.read_text())
+            ts.QUEUE_BACKUP_FILE.write_text(ts.QUEUE_FILE.read_text())
+        except (json.JSONDecodeError, OSError):
+            pass
+
+    # Write new data, as run_triage would after backing up
+    new_data = [{"issue": 2, "title": "New", "ready": True}]
+    ts.QUEUE_FILE.write_text(json.dumps(new_data, indent=2) + "\n")
+
+    # Backup should hold the previous (pre-write) queue contents
+    assert ts.QUEUE_BACKUP_FILE.exists()
+    assert json.loads(ts.QUEUE_BACKUP_FILE.read_text()) == initial_data
+
+
+def test_corrupt_queue_restored_from_backup(tmp_path, capsys):
+    """If queue.json is corrupt, it should be restored from backup."""
+    # Create a valid backup
+    valid_data = [{"issue": 1, "title": "Backup", "ready": True}]
+    ts.QUEUE_BACKUP_FILE.parent.mkdir(parents=True, exist_ok=True)
+    ts.QUEUE_BACKUP_FILE.write_text(json.dumps(valid_data, indent=2) + "\n")
+
+    # Create a corrupt queue file
+    ts.QUEUE_FILE.parent.mkdir(parents=True, exist_ok=True)
+    ts.QUEUE_FILE.write_text("not valid json {{{")
+
+    # Run validation and restore logic
+    try:
+        json.loads(ts.QUEUE_FILE.read_text())
+    except (json.JSONDecodeError, OSError):
+        if ts.QUEUE_BACKUP_FILE.exists():
+            try:
+                backup_data = ts.QUEUE_BACKUP_FILE.read_text()
+                json.loads(backup_data)  # Validate backup
+                ts.QUEUE_FILE.write_text(backup_data)
+                print("[triage] Restored queue.json from backup")
+            except (json.JSONDecodeError, OSError):
+                ts.QUEUE_FILE.write_text("[]\n")
+        else:
+            ts.QUEUE_FILE.write_text("[]\n")
+
+    # Queue should be restored from backup
+    assert json.loads(ts.QUEUE_FILE.read_text()) == valid_data
+    captured = capsys.readouterr()
+    assert "Restored queue.json from backup" in captured.out
+
+
+def test_corrupt_queue_no_backup_writes_empty_list(tmp_path):
+    """If queue.json is corrupt and no backup exists, write empty list."""
+    # Ensure no backup exists
+    assert not ts.QUEUE_BACKUP_FILE.exists()
+
+    # Create a corrupt queue file
+    ts.QUEUE_FILE.parent.mkdir(parents=True, exist_ok=True)
+    ts.QUEUE_FILE.write_text("not valid json {{{")
+
+    # Run validation and restore logic
+    try:
+        json.loads(ts.QUEUE_FILE.read_text())
+    except (json.JSONDecodeError, OSError):
+        if ts.QUEUE_BACKUP_FILE.exists():
+            try:
+                backup_data = ts.QUEUE_BACKUP_FILE.read_text()
+                json.loads(backup_data)
+                ts.QUEUE_FILE.write_text(backup_data)
+            except (json.JSONDecodeError, OSError):
+                ts.QUEUE_FILE.write_text("[]\n")
+        else:
+            ts.QUEUE_FILE.write_text("[]\n")
+
+    # Should have empty list
+    assert json.loads(ts.QUEUE_FILE.read_text()) == []
+
+
+def test_corrupt_backup_writes_empty_list(tmp_path):
+    """If both queue.json and backup are corrupt, write empty list."""
+    # Create a corrupt backup
+    ts.QUEUE_BACKUP_FILE.parent.mkdir(parents=True, exist_ok=True)
+    ts.QUEUE_BACKUP_FILE.write_text("also corrupt backup")
+
+    # Create a corrupt queue file
+    ts.QUEUE_FILE.parent.mkdir(parents=True, exist_ok=True)
+    ts.QUEUE_FILE.write_text("not valid json {{{")
+
+    # Run validation and restore logic
+    try:
+        json.loads(ts.QUEUE_FILE.read_text())
+    except (json.JSONDecodeError, OSError):
+        if ts.QUEUE_BACKUP_FILE.exists():
+            try:
+                backup_data = ts.QUEUE_BACKUP_FILE.read_text()
+                json.loads(backup_data)
+                ts.QUEUE_FILE.write_text(backup_data)
+            except (json.JSONDecodeError, OSError):
+                ts.QUEUE_FILE.write_text("[]\n")
+        else:
+            ts.QUEUE_FILE.write_text("[]\n")
+
+    # Should have empty list
+    assert json.loads(ts.QUEUE_FILE.read_text()) == []
+
+
+def test_valid_queue_not_corrupt_no_backup_overwrite(tmp_path):
+    """Don't overwrite backup if current queue.json is corrupt."""
+    # Create a valid backup
+    valid_backup = [{"issue": 99, "title": "Old Backup", "ready": True}]
+    ts.QUEUE_BACKUP_FILE.parent.mkdir(parents=True, exist_ok=True)
+    ts.QUEUE_BACKUP_FILE.write_text(json.dumps(valid_backup, indent=2) + "\n")
+
+    # Create a corrupt queue file
+    ts.QUEUE_FILE.parent.mkdir(parents=True, exist_ok=True)
+    ts.QUEUE_FILE.write_text("corrupt data")
+
+    # Try to save backup (should skip because current is corrupt)
+    if ts.QUEUE_FILE.exists():
+        try:
+            json.loads(ts.QUEUE_FILE.read_text())  # This will fail
+            ts.QUEUE_BACKUP_FILE.write_text(ts.QUEUE_FILE.read_text())
+        except (json.JSONDecodeError, OSError):
+            pass  # Should hit this branch
+
+    # Backup should still have original valid data
+    assert json.loads(ts.QUEUE_BACKUP_FILE.read_text()) == valid_backup
+
+
+def test_backup_path_configuration():
+    """Ensure backup file path is properly configured relative to queue file."""
+    assert ts.QUEUE_BACKUP_FILE.parent == ts.QUEUE_FILE.parent
+    assert ts.QUEUE_BACKUP_FILE.name == "queue.json.bak"
+    assert ts.QUEUE_FILE.name == "queue.json"