Compare commits

..

1 Commits

Author SHA1 Message Date
Alexander Whitestone
71a0575869 fix: #1470 - Add PR backlog analyzer for timmy-config
Some checks failed
CI / test (pull_request) Failing after 18s
CI / validate (pull_request) Failing after 17s
Review Approval Gate / verify-review (pull_request) Failing after 2s
2026-04-14 23:24:29 -04:00
3 changed files with 222 additions and 76 deletions

View File

@@ -28,16 +28,11 @@ except ImportError:
websockets = None
from nexus.evennia_event_adapter import (
actor_located,
audit_heartbeat,
command_executed,
command_issued,
command_result,
player_join,
player_leave,
player_move,
room_snapshot,
session_bound,
)
# Matches ANSI CSI escape sequences (color/cursor codes such as "\x1b[32m")
# so terminal output can be reduced to plain text before parsing.
ANSI_RE = re.compile(r"\x1b\[[0-9;]*[A-Za-z]")
@@ -54,82 +49,31 @@ def strip_ansi(text: str) -> str:
return ANSI_RE.sub("", text or "")
def clean_lines(text: str) -> list[str]:
    """Return the ANSI-stripped, whitespace-trimmed, non-empty lines of *text*."""
    plain = strip_ansi(text).replace("\r", "")
    trimmed = (chunk.strip() for chunk in plain.split("\n"))
    return [chunk for chunk in trimmed if chunk]
def parse_room_output(text: str) -> dict | None:
    """Parse Evennia room output into structured data with title, desc, exits, objects.

    Returns None when the output is too short to contain a title and description.
    """
    lines = clean_lines(text)
    if len(lines) < 2:
        return None
    room = {"title": lines[0], "desc": lines[1], "exits": [], "objects": []}
    for line in lines[2:]:
        if line.startswith("Exits:"):
            # "Exits: north, south and east" -> ["north", "south", "east"]
            raw = line.split(":", 1)[1].strip().replace(" and ", ", ")
            names = [token.strip() for token in raw.split(",") if token.strip()]
            room["exits"] = [
                {"key": name, "destination_id": name.title(), "destination_key": name.title()}
                for name in names
            ]
        elif line.startswith("You see:"):
            raw = line.split(":", 1)[1].strip().replace(" and ", ", ")
            names = [token.strip() for token in raw.split(",") if token.strip()]
            # Drop a leading article for id/key; keep the full phrase as short_desc.
            room["objects"] = [
                {
                    "id": name.removeprefix("a ").removeprefix("an "),
                    "key": name.removeprefix("a ").removeprefix("an "),
                    "short_desc": name,
                }
                for name in names
            ]
    return room
def normalize_event(raw: dict, hermes_session_id: str) -> list[dict]:
    """Normalize a raw Evennia event dict into a list of Nexus event dicts."""
    events: list[dict] = []
    kind = raw.get("event")
    actor = raw.get("actor", "Timmy")
    ts = raw.get("timestamp")

    def append_room_events(output: str) -> None:
        # A room description in the output yields a location fix plus a snapshot.
        parsed = parse_room_output(output)
        if parsed:
            events.append(actor_located(actor, parsed["title"], parsed["title"], timestamp=ts))
            events.append(room_snapshot(parsed["title"], parsed["title"], parsed["desc"],
                                        exits=parsed["exits"], objects=parsed["objects"],
                                        timestamp=ts))

    if kind == "connect":
        events.append(session_bound(hermes_session_id, evennia_account=actor,
                                    evennia_character=actor, timestamp=ts))
        append_room_events(raw.get("output", ""))
    elif kind == "command":
        cmd = raw.get("command", "")
        output = raw.get("output", "")
        events.append(command_issued(hermes_session_id, actor, cmd, timestamp=ts))
        # Heuristic: Evennia error replies start with "Command '" or "Could not find".
        ok = not output.startswith(("Command '", "Could not find"))
        events.append(command_result(hermes_session_id, actor, cmd, strip_ansi(output),
                                     success=ok, timestamp=ts))
        append_room_events(output)
    return events
class LogTailer:
"""Async file tailer that yields new lines as they appear."""
def __init__(self, path: str, poll_interval: float = 0.5):
    """Remember which file to tail and how often to poll it for growth.

    path: filesystem path of the log file (it may not exist yet).
    poll_interval: seconds to sleep between size checks.
    """
    self.path = path
    self.poll_interval = poll_interval
    # Byte offset of the last data read; reset when the file shrinks (rotation).
    self._offset = 0
async def tail(self):
"""Yield new lines from the file, starting from end."""
# Start at end of file
if os.path.exists(self.path):
self._offset = os.path.getsize(self.path)
while True:
try:
if not os.path.exists(self.path):
await asyncio.sleep(self.poll_interval)
continue
size = os.path.getsize(self.path)
if size < self._offset:
# File was truncated/rotated
self._offset = 0
if size > self._offset:
with open(self.path, "r") as f:
f.seek(self._offset)
@@ -138,7 +82,7 @@ class LogTailer:
if line:
yield line
self._offset = f.tell()
await asyncio.sleep(self.poll_interval)
except Exception as e:
print(f"[tailer] Error reading {self.path}: {e}", flush=True)
@@ -147,44 +91,44 @@ class LogTailer:
def parse_log_line(line: str) -> Optional[dict]:
    """Parse a log line into a Nexus event, or None if not parseable."""
    # Ordered (pattern, builder) dispatch; the first matching pattern wins.
    dispatch = (
        # Movement events
        (MOVE_RE, lambda m: player_move(m.group(1), m.group(3), m.group(2))),
        # Command events
        (CMD_RE, lambda m: command_executed(m.group(1), m.group(2), m.group(3) or "")),
        # Session start
        (SESSION_START_RE, lambda m: player_join(m.group(2), m.group(1))),
        # Session end
        (SESSION_END_RE, lambda m: player_leave("", m.group(1), session_duration=float(m.group(2)))),
        # Server login
        (LOGIN_RE, lambda m: player_join(m.group(1), ip_address=m.group(2))),
        # Server logout
        (LOGOUT_RE, lambda m: player_leave(m.group(1))),
    )
    for pattern, build in dispatch:
        match = pattern.search(line)
        if match:
            return build(match)
    return None
async def live_bridge(log_dir: str, ws_url: str, reconnect_delay: float = 5.0):
"""
Main live bridge loop.
Tails all Evennia log files and streams parsed events to Nexus WebSocket.
Auto-reconnects on failure.
"""
@@ -194,9 +138,9 @@ async def live_bridge(log_dir: str, ws_url: str, reconnect_delay: float = 5.0):
os.path.join(log_dir, "player_activity.log"),
os.path.join(log_dir, "server.log"),
]
event_queue: asyncio.Queue = asyncio.Queue(maxsize=10000)
async def tail_file(path: str):
"""Tail a single file and put events on queue."""
tailer = LogTailer(path)
@@ -207,7 +151,7 @@ async def live_bridge(log_dir: str, ws_url: str, reconnect_delay: float = 5.0):
event_queue.put_nowait(event)
except asyncio.QueueFull:
pass # Drop oldest if queue full
async def ws_sender():
"""Send events from queue to WebSocket, with auto-reconnect."""
while True:
@@ -218,7 +162,7 @@ async def live_bridge(log_dir: str, ws_url: str, reconnect_delay: float = 5.0):
event = await event_queue.get()
ts = event.get("timestamp", "")[:19]
print(f"[{ts}] {event['type']}: {json.dumps({k: v for k, v in event.items() if k not in ('type', 'timestamp')})}", flush=True)
print(f"[bridge] Connecting to {ws_url}...", flush=True)
async with websockets.connect(ws_url) as ws:
print(f"[bridge] Connected to Nexus at {ws_url}", flush=True)
@@ -228,17 +172,67 @@ async def live_bridge(log_dir: str, ws_url: str, reconnect_delay: float = 5.0):
except Exception as e:
print(f"[bridge] WebSocket error: {e}. Reconnecting in {reconnect_delay}s...", flush=True)
await asyncio.sleep(reconnect_delay)
# Start all tailers + sender
tasks = [asyncio.create_task(tail_file(f)) for f in log_files]
tasks.append(asyncio.create_task(ws_sender()))
print(f"[bridge] Live bridge started. Watching {len(log_files)} log files.", flush=True)
await asyncio.gather(*tasks)
async def playback(log_path: Path, ws_url: str):
"""Legacy mode: replay a telemetry JSONL file."""
from nexus.evennia_event_adapter import (
actor_located, command_issued, command_result,
room_snapshot, session_bound,
)
def clean_lines(text: str) -> list[str]:
    """Strip ANSI codes and return the trimmed, non-empty lines of *text*.

    NOTE(review): byte-for-byte duplicate of the module-level clean_lines;
    consider calling that instead of redefining it inside playback().
    """
    text = strip_ansi(text).replace("\r", "")
    return [line.strip() for line in text.split("\n") if line.strip()]
def parse_room_output(text: str):
lines = clean_lines(text)
if len(lines) < 2:
return None
title = lines[0]
desc = lines[1]
exits = []
objects = []
for line in lines[2:]:
if line.startswith("Exits:"):
raw = line.split(":", 1)[1].strip().replace(" and ", ", ")
exits = [{"key": t.strip(), "destination_id": t.strip().title(), "destination_key": t.strip().title()} for t in raw.split(",") if t.strip()]
elif line.startswith("You see:"):
raw = line.split(":", 1)[1].strip().replace(" and ", ", ")
parts = [t.strip() for t in raw.split(",") if t.strip()]
objects = [{"id": p.removeprefix("a ").removeprefix("an "), "key": p.removeprefix("a ").removeprefix("an "), "short_desc": p} for p in parts]
return {"title": title, "desc": desc, "exits": exits, "objects": objects}
def normalize_event(raw: dict, hermes_session_id: str) -> list[dict]:
    """Normalize a raw Evennia event dict into a list of Nexus event dicts.

    NOTE(review): duplicates the module-level normalize_event; consider
    reusing that instead of redefining it inside playback().
    """
    out = []
    event = raw.get("event")
    actor = raw.get("actor", "Timmy")
    timestamp = raw.get("timestamp")
    if event == "connect":
        out.append(session_bound(hermes_session_id, evennia_account=actor, evennia_character=actor, timestamp=timestamp))
        # If the login output contains a room description, emit location + snapshot.
        parsed = parse_room_output(raw.get("output", ""))
        if parsed:
            out.append(actor_located(actor, parsed["title"], parsed["title"], timestamp=timestamp))
            out.append(room_snapshot(parsed["title"], parsed["title"], parsed["desc"], exits=parsed["exits"], objects=parsed["objects"], timestamp=timestamp))
    elif event == "command":
        cmd = raw.get("command", "")
        output = raw.get("output", "")
        out.append(command_issued(hermes_session_id, actor, cmd, timestamp=timestamp))
        # Heuristic: Evennia error replies start with "Command '" or "Could not find".
        success = not output.startswith("Command '") and not output.startswith("Could not find")
        out.append(command_result(hermes_session_id, actor, cmd, strip_ansi(output), success=success, timestamp=timestamp))
        parsed = parse_room_output(output)
        if parsed:
            out.append(actor_located(actor, parsed["title"], parsed["title"], timestamp=timestamp))
            out.append(room_snapshot(parsed["title"], parsed["title"], parsed["desc"], exits=parsed["exits"], objects=parsed["objects"], timestamp=timestamp))
    return out
hermes_session_id = log_path.stem
async with websockets.connect(ws_url) as ws:
for line in log_path.read_text(encoding="utf-8").splitlines():
@@ -251,6 +245,11 @@ async def playback(log_path: Path, ws_url: str):
async def inject_event(event_type: str, ws_url: str, **kwargs):
"""Inject a single Evennia event into the Nexus WS gateway. Dev/test use."""
from nexus.evennia_event_adapter import (
actor_located, command_issued, command_result,
room_snapshot, session_bound,
)
builders = {
"room_snapshot": lambda: room_snapshot(
kwargs.get("room_key", "Gate"),

View File

@@ -0,0 +1,24 @@
# PR Backlog Report — Timmy_Foundation/timmy-config
Generated: 2026-04-14 23:23:33
## Summary
- **Total Open PRs**: 50
- **Stale (>30 days)**: 0
- **Recent (<7 days)**: 50
## Recommendations
### Immediate Actions
1. **Review stale PRs**: 0 PRs are >30 days old
2. **Close duplicates**: Check for duplicate PRs on same issues
3. **Assign reviewers**: Ensure each PR has a reviewer
### Process Improvements
1. **Set SLAs**: Review within 48 hours, merge within 7 days
2. **Weekly cleanup**: Run this analyzer weekly
3. **Automate**: Add CI checks to prevent backlog
## Stale PRs (>30 days)

123
scripts/pr-backlog-analyzer.py Executable file
View File

@@ -0,0 +1,123 @@
#!/usr/bin/env python3
"""
PR Backlog Analyzer for timmy-config
Analyzes open PRs and provides recommendations for cleanup.
Issue: #1470
"""
import json
import subprocess
import sys
from datetime import datetime, timedelta
from pathlib import Path
def get_open_prs(repo: str, token: str) -> list:
    """Fetch all open PRs for *repo* from the Gitea API.

    Args:
        repo: "owner/name" repository slug.
        token: Gitea API token, sent via the Authorization header.

    Returns:
        List of PR payload dicts, or [] on any fetch or parse failure.
    """
    url = f"https://forge.alexanderwhitestone.com/api/v1/repos/{repo}/pulls?state=open&limit=100"
    try:
        result = subprocess.run([
            "curl", "-s", "-H", f"Authorization: token {token}", url,
        ], capture_output=True, text=True)
    except FileNotFoundError:
        # curl binary is not installed on this host.
        print("Error fetching PRs: curl not found")
        return []
    if result.returncode != 0:
        print(f"Error fetching PRs: {result.stderr}")
        return []
    try:
        data = json.loads(result.stdout)
    except json.JSONDecodeError as e:
        # `curl -s` suppresses HTTP errors, so a non-JSON error page lands here
        # instead of producing a nonzero exit status.
        print(f"Error parsing PR response: {e}")
        return []
    if not isinstance(data, list):
        # Gitea reports API errors as a JSON object (e.g. {"message": ...}),
        # which would crash analyze_pr downstream if returned as-is.
        print(f"Unexpected API response: {data}")
        return []
    return data
def analyze_pr(pr: dict) -> dict:
    """Reduce a raw Gitea PR payload to the flat fields the report needs.

    Args:
        pr: One pull-request object as returned by the Gitea API.

    Returns:
        Dict with number, title, branch, created, age_days, user, labels, url.
    """
    # Gitea timestamps are ISO 8601 with a trailing 'Z'; datetime.fromisoformat
    # (before Python 3.11) rejects 'Z', so rewrite it as an explicit UTC offset.
    created = datetime.fromisoformat(pr['created_at'].replace('Z', '+00:00'))
    # Compute "now" in the same timezone as `created` so the subtraction is valid.
    age_days = (datetime.now(created.tzinfo) - created).days
    # The API serializes "no labels" as null (not []); `or []` guards both cases —
    # pr.get('labels', []) alone returns None when the key is present but null.
    labels = [label['name'] for label in (pr.get('labels') or [])]
    return {
        'number': pr['number'],
        'title': pr['title'],
        'branch': pr['head']['ref'],
        'created': pr['created_at'],
        'age_days': age_days,
        'user': pr['user']['login'],
        'labels': labels,
        'url': pr['html_url'],
    }
def generate_report(repo: str, prs: list) -> str:
    """Render the analyzed PR list as a markdown backlog report."""
    stale = [entry for entry in prs if entry['age_days'] > 30]
    recent = [entry for entry in prs if entry['age_days'] <= 7]
    generated_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    header = (
        f"# PR Backlog Report — {repo}\n"
        f"Generated: {generated_at}\n"
        "## Summary\n"
        f"- **Total Open PRs**: {len(prs)}\n"
        f"- **Stale (>30 days)**: {len(stale)}\n"
        f"- **Recent (<7 days)**: {len(recent)}\n"
        "## Recommendations\n"
        "### Immediate Actions\n"
        f"1. **Review stale PRs**: {len(stale)} PRs are >30 days old\n"
        "2. **Close duplicates**: Check for duplicate PRs on same issues\n"
        "3. **Assign reviewers**: Ensure each PR has a reviewer\n"
        "### Process Improvements\n"
        "1. **Set SLAs**: Review within 48 hours, merge within 7 days\n"
        "2. **Weekly cleanup**: Run this analyzer weekly\n"
        "3. **Automate**: Add CI checks to prevent backlog\n"
        "## Stale PRs (>30 days)\n"
    )
    # One bullet per stale PR, oldest first; join at the end instead of +=.
    stale_sections = [
        f"- **#{entry['number']}**: {entry['title']}\n"
        f"  - Age: {entry['age_days']} days\n"
        f"  - Author: {entry['user']}\n"
        f"  - URL: {entry['url']}\n\n"
        for entry in sorted(stale, key=lambda e: e['age_days'], reverse=True)
    ]
    return header + "".join(stale_sections)
def main():
    """Entry point: read the token, fetch and analyze PRs, write a dated report."""
    token_path = Path.home() / '.config' / 'gitea' / 'token'
    if not token_path.exists():
        print("Error: Gitea token not found")
        sys.exit(1)
    token = token_path.read_text().strip()

    repo = "Timmy_Foundation/timmy-config"
    print(f"Fetching PRs for {repo}...")
    prs = get_open_prs(repo, token)
    if not prs:
        print("No open PRs found")
        return
    print(f"Found {len(prs)} open PRs")

    analyzed = [analyze_pr(pr) for pr in prs]
    report = generate_report(repo, analyzed)

    # Persist one report per day under reports/.
    output_dir = Path("reports")
    output_dir.mkdir(exist_ok=True)
    report_file = output_dir / f"pr-backlog-{datetime.now().strftime('%Y%m%d')}.md"
    report_file.write_text(report)

    print(f"Report saved to: {report_file}")
    print(f"Total PRs: {len(prs)}")
    stale_count = sum(1 for entry in analyzed if entry['age_days'] > 30)
    print(f"Stale (>30 days): {stale_count}")


if __name__ == "__main__":
    main()