From ff1e43c23548743afe3b77ff7dcfa0b309dfa7bc Mon Sep 17 00:00:00 2001
From: Timmy Time
Date: Thu, 19 Mar 2026 20:10:05 -0400
Subject: [PATCH] =?UTF-8?q?[loop-cycle-545]=20fix:=20queue=20auto-hygiene?=
 =?UTF-8?q?=20=E2=80=94=20filter=20closed=20issues=20on=20read=20(#524)=20?=
 =?UTF-8?q?(#529)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 scripts/loop_guard.py | 76 ++++++++++++++++++++++++++++++++++++++++---
 1 file changed, 72 insertions(+), 4 deletions(-)

diff --git a/scripts/loop_guard.py b/scripts/loop_guard.py
index 7ef4449..76c9db2 100644
--- a/scripts/loop_guard.py
+++ b/scripts/loop_guard.py
@@ -18,13 +18,19 @@ Exit codes:
 from __future__ import annotations
 
 import json
+import os
 import sys
 import time
+import urllib.request
 from pathlib import Path
 
 REPO_ROOT = Path(__file__).resolve().parent.parent
 QUEUE_FILE = REPO_ROOT / ".loop" / "queue.json"
 IDLE_STATE_FILE = REPO_ROOT / ".loop" / "idle_state.json"
+TOKEN_FILE = Path.home() / ".hermes" / "gitea_token"
+
+GITEA_API = os.environ.get("GITEA_API", "http://localhost:3000/api/v1")
+REPO_SLUG = os.environ.get("REPO_SLUG", "rockachopa/Timmy-time-dashboard")
 
 # Backoff sequence: 60s, 120s, 240s, 600s max
 BACKOFF_BASE = 60
@@ -32,19 +38,81 @@ BACKOFF_MAX = 600
 BACKOFF_MULTIPLIER = 2
 
+
+def _get_token() -> str:
+    """Read Gitea token from env or file."""
+    token = os.environ.get("GITEA_TOKEN", "").strip()
+    if not token and TOKEN_FILE.exists():
+        token = TOKEN_FILE.read_text().strip()
+    return token
+
+
+def _fetch_open_issue_numbers() -> set[int] | None:
+    """Fetch open issue numbers from Gitea. Returns None on failure."""
+    token = _get_token()
+    if not token:
+        return None
+    try:
+        numbers: set[int] = set()
+        page = 1
+        while True:
+            url = (
+                f"{GITEA_API}/repos/{REPO_SLUG}/issues"
+                f"?state=open&type=issues&limit=50&page={page}"
+            )
+            req = urllib.request.Request(url, headers={
+                "Authorization": f"token {token}",
+                "Accept": "application/json",
+            })
+            with urllib.request.urlopen(req, timeout=10) as resp:
+                data = json.loads(resp.read())
+            if not data:
+                break
+            for issue in data:
+                numbers.add(issue["number"])
+            if len(data) < 50:
+                break
+            page += 1
+        return numbers
+    except Exception:
+        return None
+
 
 def load_queue() -> list[dict]:
-    """Load queue.json and return ready items."""
+    """Load queue.json and return ready items, filtering out closed issues."""
     if not QUEUE_FILE.exists():
         return []
     try:
         data = json.loads(QUEUE_FILE.read_text())
-        if isinstance(data, list):
-            return [item for item in data if item.get("ready")]
-        return []
+        if not isinstance(data, list):
+            return []
+        ready = [item for item in data if item.get("ready")]
+        if not ready:
+            return []
+
+        # Filter out issues that are no longer open (auto-hygiene)
+        open_numbers = _fetch_open_issue_numbers()
+        if open_numbers is not None:
+            before = len(ready)
+            ready = [item for item in ready if item.get("issue") in open_numbers]
+            removed = before - len(ready)
+            if removed > 0:
+                print(f"[loop-guard] Filtered {removed} closed issue(s) from queue")
+                # Persist the cleaned queue so stale entries don't recur
+                _save_cleaned_queue(data, open_numbers)
+        return ready
     except (json.JSONDecodeError, OSError):
         return []
 
+
+def _save_cleaned_queue(full_queue: list[dict], open_numbers: set[int]) -> None:
+    """Rewrite queue.json without closed issues."""
+    cleaned = [item for item in full_queue if item.get("issue") in open_numbers]
+    try:
+        QUEUE_FILE.write_text(json.dumps(cleaned, indent=2) + "\n")
+    except OSError:
+        pass
+
 
 def load_idle_state() -> dict:
     """Load persistent idle state."""
     if not IDLE_STATE_FILE.exists():