Compare commits — 1 commit (3f45cae90a)
configs/burn_velocity_repos.json
@@ -1,18 +0,0 @@
{
  "owner": "Timmy_Foundation",
  "repos": [
    "timmy-home",
    "timmy-config",
    "fleet-ops",
    "the-beacon",
    "the-door",
    "the-nexus"
  ],
  "lookback_days": 14,
  "alert": {
    "recent_days": 7,
    "baseline_days": 7,
    "minimum_baseline_closed": 4,
    "drop_ratio": 0.6
  }
}

docs/BURN_VELOCITY_TRACKING.md
@@ -1,70 +0,0 @@
# Burn-down Velocity Tracking

Refs #519.

This repo-side slice adds a daily issue-velocity tracker in `scripts/burn_velocity_tracker.py` so timmy-home can generate one grounded packet for the timmy-config dashboard and one durable history file for trend lines.

## What it emits

Each daily run writes:

- `~/.timmy/burn-velocity/latest.json` — machine-readable payload for the timmy-config dashboard
- `~/.timmy/burn-velocity/latest.md` — operator-facing markdown summary
- `~/.timmy/burn-velocity/history.json` — per-day history for trend charts and alert review

Tracked repos live in `configs/burn_velocity_repos.json`. A sketch of consuming the history file follows.
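
For trend lines, a minimal consumer sketch (illustrative, not shipped): it assumes only the `{"days": [...]}` shape that `update_history` in the tracker persists.

```python
import json
from pathlib import Path

# Illustrative only: reduce history.json to one trend row per day.
# The {"days": [...]} shape matches what update_history() writes.
history_file = Path.home() / ".timmy" / "burn-velocity" / "history.json"
history = json.loads(history_file.read_text(encoding="utf-8"))
for day in history["days"]:
    opened = sum(repo["opened_last_7d"] for repo in day["repos"])
    closed = sum(repo["closed_last_7d"] for repo in day["repos"])
    print(f"{day['date']}: open={day['summary']['total_open_now']} "
          f"opened7d={opened} closed7d={closed}")
```
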
## Cron command

```bash
cd ~/timmy-home && \
python3 scripts/burn_velocity_tracker.py \
  --config configs/burn_velocity_repos.json \
  --output-json ~/.timmy/burn-velocity/latest.json \
  --output-md ~/.timmy/burn-velocity/latest.md \
  --history-file ~/.timmy/burn-velocity/history.json \
  --write-history
```

Example crontab entry:

```cron
0 6 * * * cd ~/timmy-home && python3 scripts/burn_velocity_tracker.py --config configs/burn_velocity_repos.json --output-json ~/.timmy/burn-velocity/latest.json --output-md ~/.timmy/burn-velocity/latest.md --history-file ~/.timmy/burn-velocity/history.json --write-history
```

## Dashboard handoff

The timmy-config dashboard should read `~/.timmy/burn-velocity/latest.json` and render, per repo:

- `open_now`
- `opened_last_7d`
- `closed_last_7d`
- `baseline_closed`
- `weekly_net`
- `alert.status`
- `alert.kind`
- `alert.reason`

Alert rows should highlight `velocity_drop` so operators can see when the recent 7-day close count drops below the configured baseline threshold; a minimal consumer sketch follows.
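
A minimal sketch of that consumer, assuming only the payload fields listed above (the real dashboard UI is explicitly out of scope; see the scope boundary below):

```python
import json
from pathlib import Path

# Illustrative consumer: print one row per repo and flag alert rows.
# Field names follow the dashboard contract above.
payload = Path.home() / ".timmy" / "burn-velocity" / "latest.json"
report = json.loads(payload.read_text(encoding="utf-8"))
for repo in report["repos"]:
    alert = repo["alert"]
    marker = "!!" if alert["status"] != "ok" else "  "
    print(f"{marker} {repo['repo']}: open={repo['open_now']} "
          f"opened7d={repo['opened_last_7d']} closed7d={repo['closed_last_7d']} "
          f"baseline={repo['baseline_closed']} net={repo['weekly_net']} "
          f"[{alert['kind']}] {alert['reason']}")
```
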
## Alert policy

Alert settings are carried in `configs/burn_velocity_repos.json`:

- `recent_days`
- `baseline_days`
- `minimum_baseline_closed`
- `drop_ratio`

Current default: flag `velocity_drop` when closes in the last 7 days fall below 60% of closes in the prior 7 days, provided the baseline window had at least 4 closed issues. The sketch below restates the rule.
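
Restated as a minimal sketch (the authoritative implementation is `summarize_velocity_alert` in `scripts/burn_velocity_tracker.py` below; this just makes the default rule concrete):

```python
# Default alert rule, restated for clarity. Mirrors the thresholds in
# summarize_velocity_alert() in scripts/burn_velocity_tracker.py.
def is_velocity_drop(recent_closed: int, baseline_closed: int,
                     minimum_baseline_closed: int = 4,
                     drop_ratio: float = 0.6) -> bool:
    if baseline_closed < minimum_baseline_closed:
        return False  # not enough baseline signal to judge a drop
    return recent_closed < baseline_closed * drop_ratio

assert is_velocity_drop(recent_closed=3, baseline_closed=7)      # 3 < 7 * 0.6 = 4.2
assert not is_velocity_drop(recent_closed=5, baseline_closed=7)  # 5 >= 4.2
```
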
## Gitea API contract

The tracker intentionally queries the Gitea issues API with `type=issues` so pull requests do not contaminate repo burn-down counts.

Live collection shape:

- open backlog uses `/repos/{owner}/{repo}/issues?state=open&type=issues`
- recent event scan uses `/repos/{owner}/{repo}/issues?state=all&type=issues&since=...`

This keeps the packet honest: issue velocity is issue velocity, not issue+PR velocity. A request sketch follows.
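
A minimal request sketch under those contracts (the tracker's `GiteaClient` below is the real implementation; this only illustrates the query shape plus the belt-and-braces client-side `pull_request` filter):

```python
import json
from urllib import parse, request

# Illustrative request against the Gitea issues API: `type=issues` asks
# the server to exclude PRs, and the pull_request check guards against
# servers that ignore the parameter. First page only; the real client
# paginates.
def open_issue_count(base_url: str, token: str, owner: str, repo: str) -> int:
    query = parse.urlencode({"state": "open", "type": "issues", "limit": 100})
    req = request.Request(
        f"{base_url}/repos/{owner}/{repo}/issues?{query}",
        headers={"Authorization": f"token {token}"},
    )
    with request.urlopen(req, timeout=30) as resp:
        items = json.loads(resp.read().decode())
    return sum(1 for item in items if not item.get("pull_request"))
```
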
## Honest scope boundary

This timmy-home slice does not implement the actual timmy-config dashboard UI. It ships the grounded JSON/markdown/history contract that the timmy-config dashboard can consume directly, and it computes the alert classification (`velocity_drop`) so downstream UI can surface it without re-implementing the math.

scripts/burn_velocity_tracker.py
@@ -1,406 +0,0 @@
#!/usr/bin/env python3
"""Burn-down velocity tracker for Timmy Foundation issue throughput.

Refs: timmy-home #519
"""

from __future__ import annotations

import argparse
import json
from base64 import b64encode
from datetime import date, datetime, time, timedelta, timezone
from pathlib import Path
from typing import Any
from urllib import parse, request

DEFAULT_BASE_URL = "https://forge.alexanderwhitestone.com/api/v1"
DEFAULT_OWNER = "Timmy_Foundation"
DEFAULT_TOKEN_FILE = Path.home() / ".config" / "gitea" / "token"
DEFAULT_CONFIG_FILE = Path(__file__).resolve().parent.parent / "configs" / "burn_velocity_repos.json"
DEFAULT_OUTPUT_DIR = Path.home() / ".timmy" / "burn-velocity"
DEFAULT_OUTPUT_JSON = DEFAULT_OUTPUT_DIR / "latest.json"
DEFAULT_OUTPUT_MD = DEFAULT_OUTPUT_DIR / "latest.md"
DEFAULT_HISTORY_FILE = DEFAULT_OUTPUT_DIR / "history.json"
DEFAULT_CONFIG = {
    "owner": DEFAULT_OWNER,
    "repos": ["timmy-home", "timmy-config", "fleet-ops", "the-beacon", "the-door", "the-nexus"],
    "lookback_days": 14,
    "alert": {
        "recent_days": 7,
        "baseline_days": 7,
        "minimum_baseline_closed": 4,
        "drop_ratio": 0.6,
    },
}


def parse_iso8601(value: str | None) -> datetime | None:
    if not value:
        return None
    normalized = value.replace("Z", "+00:00")
    parsed = datetime.fromisoformat(normalized)
    if parsed.tzinfo is None:
        return parsed.replace(tzinfo=timezone.utc)
    return parsed.astimezone(timezone.utc)


def normalize_today(value: str | date | None = None) -> date:
    if value is None:
        return datetime.now(timezone.utc).date()
    if isinstance(value, date):
        return value
    return date.fromisoformat(value)


def build_day_window(today: date, lookback_days: int) -> list[date]:
    start = today - timedelta(days=lookback_days - 1)
    return [start + timedelta(days=offset) for offset in range(lookback_days)]


def filter_issue_items(items: list[dict[str, Any]]) -> list[dict[str, Any]]:
    # Defensive client-side filter: drop anything carrying a pull_request key.
    return [item for item in items if not item.get("pull_request")]


def build_daily_series(items: list[dict[str, Any]], today: date, lookback_days: int) -> list[dict[str, int | str]]:
    days = build_day_window(today, lookback_days)
    counts = {day.isoformat(): {"opened": 0, "closed": 0} for day in days}
    start_day = days[0]

    for item in filter_issue_items(items):
        created_at = parse_iso8601(item.get("created_at"))
        if created_at is not None:
            created_day = created_at.date()
            if start_day <= created_day <= today:
                counts[created_day.isoformat()]["opened"] += 1

        closed_at = parse_iso8601(item.get("closed_at"))
        if closed_at is not None:
            closed_day = closed_at.date()
            if start_day <= closed_day <= today:
                counts[closed_day.isoformat()]["closed"] += 1

    return [
        {
            "date": day.isoformat(),
            "opened": counts[day.isoformat()]["opened"],
            "closed": counts[day.isoformat()]["closed"],
        }
        for day in days
    ]


def summarize_velocity_alert(
    *, recent_closed: int, baseline_closed: int, open_now: int, config: dict[str, Any]
) -> dict[str, Any]:
    minimum_baseline = int(config.get("minimum_baseline_closed", 4))
    drop_ratio = float(config.get("drop_ratio", 0.6))

    if baseline_closed >= minimum_baseline and recent_closed < baseline_closed * drop_ratio:
        return {
            "status": "drop",
            "kind": "velocity_drop",
            "recent_closed": recent_closed,
            "baseline_closed": baseline_closed,
            "reason": (
                f"velocity_drop: closed {recent_closed} in the last {config.get('recent_days', 7)}d "
                f"vs {baseline_closed} in the prior {config.get('baseline_days', 7)}d"
            ),
        }

    if open_now > 0 and baseline_closed >= minimum_baseline and recent_closed == 0:
        return {
            "status": "drop",
            "kind": "velocity_drop",
            "recent_closed": recent_closed,
            "baseline_closed": baseline_closed,
            "reason": "velocity_drop: no issues closed in the recent window while backlog is still open",
        }

    return {
        "status": "ok",
        "kind": "none",
        "recent_closed": recent_closed,
        "baseline_closed": baseline_closed,
        "reason": "velocity stable",
    }


def _sum_window(daily: list[dict[str, int | str]], field: str, days: int) -> int:
    if days <= 0:
        return 0
    return sum(int(item[field]) for item in daily[-days:])


def _sum_baseline_window(daily: list[dict[str, int | str]], recent_days: int, baseline_days: int) -> int:
    # Baseline is the window immediately preceding the recent window.
    if baseline_days <= 0:
        return 0
    if recent_days <= 0:
        return sum(int(item["closed"]) for item in daily[-baseline_days:])
    baseline_slice = daily[-(recent_days + baseline_days) : -recent_days]
    return sum(int(item["closed"]) for item in baseline_slice)


def build_velocity_report(config: dict[str, Any], snapshot: dict[str, Any], today: str | date | None = None) -> dict[str, Any]:
    report_day = normalize_today(today)
    generated_at = snapshot.get("generated_at") or datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
    owner = config.get("owner", DEFAULT_OWNER)
    repos = list(config.get("repos") or sorted((snapshot.get("repos") or {}).keys()))
    lookback_days = int(config.get("lookback_days", 14))
    alert_config = dict(DEFAULT_CONFIG["alert"])
    alert_config.update(config.get("alert") or {})
    recent_days = int(alert_config.get("recent_days", 7))
    baseline_days = int(alert_config.get("baseline_days", 7))

    repo_reports: list[dict[str, Any]] = []
    total_open_now = 0
    total_closed_last_7d = 0
    repos_with_alerts: list[str] = []

    for repo_name in repos:
        repo_snapshot = (snapshot.get("repos") or {}).get(repo_name, {})
        open_issues = filter_issue_items(list(repo_snapshot.get("open_issues") or []))
        recent_issues = filter_issue_items(list(repo_snapshot.get("recent_issues") or []))
        daily = build_daily_series(recent_issues, report_day, lookback_days)

        open_now = len(open_issues)
        opened_last_7d = _sum_window(daily, "opened", recent_days)
        closed_last_7d = _sum_window(daily, "closed", recent_days)
        baseline_closed = _sum_baseline_window(daily, recent_days, baseline_days)
        weekly_net = opened_last_7d - closed_last_7d
        alert = summarize_velocity_alert(
            recent_closed=closed_last_7d,
            baseline_closed=baseline_closed,
            open_now=open_now,
            config=alert_config,
        )

        repo_report = {
            "repo": repo_name,
            "open_now": open_now,
            "opened_last_7d": opened_last_7d,
            "closed_last_7d": closed_last_7d,
            "baseline_closed": baseline_closed,
            "weekly_net": weekly_net,
            "daily": daily,
            "alert": alert,
        }
        repo_reports.append(repo_report)

        total_open_now += open_now
        total_closed_last_7d += closed_last_7d
        if alert["status"] != "ok":
            repos_with_alerts.append(repo_name)

    return {
        "owner": owner,
        "generated_at": generated_at,
        "generated_day": report_day.isoformat(),
        "lookback_days": lookback_days,
        "dashboard_contract_version": 1,
        "repos": repo_reports,
        "summary": {
            "total_open_now": total_open_now,
            "total_closed_last_7d": total_closed_last_7d,
            "repos_with_alerts": repos_with_alerts,
        },
    }


def render_markdown(report: dict[str, Any]) -> str:
    lines = [
        "# Burn-down Velocity Tracking",
        "",
        f"Generated: {report['generated_at']}",
        f"Owner: {report['owner']}",
        f"Lookback days: {report['lookback_days']}",
        "",
        "## Per-repo velocity",
        "",
        "| Repo | Open now | Opened 7d | Closed 7d | Previous 7d | Alert |",
        "| --- | ---: | ---: | ---: | ---: | --- |",
    ]

    for repo in report["repos"]:
        alert_label = repo["alert"]["kind"] if repo["alert"]["status"] != "ok" else "ok"
        lines.append(
            f"| {repo['repo']} | {repo['open_now']} | {repo['opened_last_7d']} | {repo['closed_last_7d']} | {repo['baseline_closed']} | {alert_label} |"
        )

    lines.extend(
        [
            "",
            "## Dashboard handoff for timmy-config",
            "",
            "The timmy-config dashboard should consume `~/.timmy/burn-velocity/latest.json` and render, for each repo:",
            "- `open_now`",
            "- `opened_last_7d`",
            "- `closed_last_7d`",
            "- `baseline_closed`",
            "- `alert.status` / `alert.kind` / `alert.reason`",
            "",
            "Cron should also persist `~/.timmy/burn-velocity/history.json` so timmy-config can plot the daily trend line instead of only the latest snapshot.",
            "",
            "## Alerts",
            "",
        ]
    )

    alerts = [repo for repo in report["repos"] if repo["alert"]["status"] != "ok"]
    if not alerts:
        lines.append("- none")
    else:
        for repo in alerts:
            lines.append(f"- {repo['repo']}: {repo['alert']['reason']}")

    return "\n".join(lines) + "\n"


def update_history(history_path: Path, report: dict[str, Any]) -> dict[str, Any]:
    if history_path.exists():
        history = json.loads(history_path.read_text(encoding="utf-8"))
    else:
        history = {"days": []}

    entry = {
        "date": report["generated_day"],
        "generated_at": report["generated_at"],
        "summary": report["summary"],
        "repos": report["repos"],
    }

    # Re-running on the same day replaces that day's snapshot instead of appending.
    retained = [item for item in history.get("days", []) if item.get("date") != report["generated_day"]]
    retained.append(entry)
    retained.sort(key=lambda item: item["date"])
    history["days"] = retained

    history_path.parent.mkdir(parents=True, exist_ok=True)
    history_path.write_text(json.dumps(history, indent=2), encoding="utf-8")
    return history


class GiteaClient:
    def __init__(self, token: str, owner: str = DEFAULT_OWNER, base_url: str = DEFAULT_BASE_URL):
        self.token = token
        self.owner = owner
        self.base_url = base_url.rstrip("/")

    def _headers(self) -> list[dict[str, str]]:
        # Try token auth first, then basic auth with the token as username.
        return [
            {"Authorization": f"token {self.token}", "Accept": "application/json"},
            {
                "Authorization": "Basic " + b64encode(f"{self.token}:".encode()).decode(),
                "Accept": "application/json",
            },
        ]

    def _request_json(self, url: str) -> list[dict[str, Any]]:
        last_error: Exception | None = None
        for headers in self._headers():
            try:
                req = request.Request(url, headers=headers)
                with request.urlopen(req, timeout=30) as response:
                    return json.loads(response.read().decode())
            except Exception as exc:  # pragma: no cover - exercised only on live API failure
                last_error = exc
        if last_error is None:  # pragma: no cover - defensive
            raise RuntimeError("request failed without an exception")
        raise last_error

    def list_issues(self, repo: str, *, state: str, since: str | None = None) -> list[dict[str, Any]]:
        issues: list[dict[str, Any]] = []
        page = 1
        while True:
            query = {"state": state, "type": "issues", "limit": 100, "page": page}
            if since:
                query["since"] = since
            url = f"{self.base_url}/repos/{self.owner}/{repo}/issues?{parse.urlencode(query)}"
            batch = self._request_json(url)
            if not batch:
                break
            issues.extend(filter_issue_items(batch))
            page += 1
        return issues


def load_json(path: Path, default: Any) -> Any:
    if not path.exists():
        return default
    return json.loads(path.read_text(encoding="utf-8"))


def load_config(path: Path) -> dict[str, Any]:
    config = dict(DEFAULT_CONFIG)
    alert = dict(DEFAULT_CONFIG["alert"])
    raw = load_json(path, {})
    config.update(raw)
    alert.update(raw.get("alert") or {})
    config["alert"] = alert
    return config


def collect_live_snapshot(
    config: dict[str, Any], *, today: str | date | None = None, token_file: Path = DEFAULT_TOKEN_FILE, base_url: str = DEFAULT_BASE_URL
) -> dict[str, Any]:
    token = token_file.read_text(encoding="utf-8").strip()
    report_day = normalize_today(today)
    since_day = report_day - timedelta(days=int(config.get("lookback_days", 14)) - 1)
    since_timestamp = datetime.combine(since_day, time.min, tzinfo=timezone.utc).isoformat().replace("+00:00", "Z")
    client = GiteaClient(token=token, owner=config.get("owner", DEFAULT_OWNER), base_url=base_url)

    repos = list(config.get("repos") or [])
    repo_payload = {}
    for repo in repos:
        repo_payload[repo] = {
            "open_issues": client.list_issues(repo, state="open"),
            "recent_issues": client.list_issues(repo, state="all", since=since_timestamp),
        }

    return {
        "generated_at": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
        "repos": repo_payload,
    }


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="Track per-repo issue burn-down velocity and emit timmy-config dashboard payloads.")
    parser.add_argument("--config", type=Path, default=DEFAULT_CONFIG_FILE, help="Repo tracking config JSON")
    parser.add_argument("--snapshot-file", type=Path, help="Use a pre-fetched snapshot JSON instead of calling Gitea")
    parser.add_argument("--token-file", type=Path, default=DEFAULT_TOKEN_FILE, help="Gitea token file for live collection")
    parser.add_argument("--base-url", default=DEFAULT_BASE_URL, help="Gitea API base URL")
    parser.add_argument("--today", help="Override report date (YYYY-MM-DD)")
    parser.add_argument("--output-json", type=Path, default=DEFAULT_OUTPUT_JSON, help="Path for latest JSON payload")
    parser.add_argument("--output-md", type=Path, default=DEFAULT_OUTPUT_MD, help="Path for latest markdown summary")
    parser.add_argument("--history-file", type=Path, default=DEFAULT_HISTORY_FILE, help="Path for persisted daily history JSON")
    parser.add_argument("--write-history", action="store_true", help="Update the daily history file after generating the report")
    parser.add_argument("--json", action="store_true", help="Print JSON instead of markdown to stdout")
    return parser.parse_args()


def main() -> None:
    args = parse_args()
    config = load_config(args.config)

    if args.snapshot_file:
        snapshot = load_json(args.snapshot_file, {"repos": {}})
    else:
        snapshot = collect_live_snapshot(config, today=args.today, token_file=args.token_file, base_url=args.base_url)

    report = build_velocity_report(config, snapshot, today=args.today)

    args.output_json.parent.mkdir(parents=True, exist_ok=True)
    args.output_md.parent.mkdir(parents=True, exist_ok=True)
    args.output_json.write_text(json.dumps(report, indent=2), encoding="utf-8")
    args.output_md.write_text(render_markdown(report), encoding="utf-8")

    if args.write_history:
        update_history(args.history_file, report)

    if args.json:
        print(json.dumps(report, indent=2))
    else:
        print(render_markdown(report))


if __name__ == "__main__":
    main()

scripts/cross_agent_quality_audit.py (new file, 313 lines)
@@ -0,0 +1,313 @@
#!/usr/bin/env python3
"""
Cross-agent quality audit — #518

Fetches all PRs across Timmy_Foundation repos, classifies by agent,
and produces a merge-rate scorecard.

Usage:
    python scripts/cross_agent_quality_audit.py
    python scripts/cross_agent_quality_audit.py --scorecard timmy-config/agent-quality-scorecard.md
"""

import argparse
import json
import os
import re
import sys
from collections import defaultdict
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, List, Optional

import requests

GITEA_BASE = "https://forge.alexanderwhitestone.com/api/v1"
ORG = "Timmy_Foundation"
# Read at import time: requires GITEA_TOKEN or the token file to exist.
TOKEN = os.environ.get("GITEA_TOKEN") or (
    Path.home() / ".config" / "gitea" / "token"
).read_text().strip()

HEADERS = {"Authorization": f"token {TOKEN}"}

# Repos to audit (active code repos)
DEFAULT_REPOS = [
    "timmy-home",
    "hermes-agent",
    "the-nexus",
    "the-door",
    "fleet-ops",
    "burn-fleet",
    "the-playground",
    "compounding-intelligence",
    "the-beacon",
    "second-son-of-timmy",
    "timmy-academy",
    "timmy-config",
]


class AgentClassifier:
    """Classify PRs by agent identity."""

    # PR title prefixes that explicitly name an agent
    AGENT_TITLE_RE = re.compile(
        r"^\[(?P<agent>Claude|Ezra|Allegro|Bezalel|Timmy|Gemini|Kimi|Manus|Codex)\]",
        re.IGNORECASE,
    )

    # Branch patterns that embed agent names
    AGENT_BRANCH_RE = re.compile(
        r"(?P<agent>claude|ezra|allegro|bezalel|timmy|gemini|kimi|manus|codex)",
        re.IGNORECASE,
    )

    @classmethod
    def classify(cls, pr: Dict[str, Any]) -> str:
        title = pr.get("title", "")
        branch = pr.get("head", {}).get("ref", "")
        user = pr.get("user", {}).get("login", "")

        # 1. Explicit title tag like [Claude] or [Ezra]
        m = cls.AGENT_TITLE_RE.match(title)
        if m:
            return m.group("agent").lower()

        # 2. Branch contains agent name (e.g. claude/issue-123)
        m = cls.AGENT_BRANCH_RE.search(branch)
        if m:
            return m.group("agent").lower()

        # 3. Git user mapping
        if user.lower() == "claude":
            return "claude"
        if user.lower() == "rockachopa":
            # Rockachopa is the human / orchestrator — map to "burn-loop"
            return "burn-loop"

        return "unknown"


def fetch_prs(repo: str, state: str = "all", per_page: int = 50) -> List[Dict[str, Any]]:
    """Paginate through all PRs for a repo."""
    prs: List[Dict[str, Any]] = []
    page = 1
    while True:
        url = f"{GITEA_BASE}/repos/{ORG}/{repo}/pulls?state={state}&limit={per_page}&page={page}"
        resp = requests.get(url, headers=HEADERS, timeout=30)
        resp.raise_for_status()
        batch = resp.json()
        if not batch:
            break
        prs.extend(batch)
        if len(batch) < per_page:
            break
        page += 1
    return prs


def parse_datetime(dt_str: Optional[str]) -> Optional[datetime]:
    if not dt_str:
        return None
    try:
        return datetime.fromisoformat(dt_str.replace("Z", "+00:00"))
    except ValueError:
        return None


def hours_between(start: Optional[str], end: Optional[str]) -> Optional[float]:
    s = parse_datetime(start)
    e = parse_datetime(end)
    if s and e:
        return (e - s).total_seconds() / 3600
    return None


def audit_repos(repos: List[str]) -> Dict[str, Any]:
    """Run the audit and return aggregated stats."""
    agent_stats: Dict[str, Dict[str, Any]] = defaultdict(
        lambda: {
            "total": 0,
            "merged": 0,
            "closed_unmerged": 0,
            "open": 0,
            "hours_to_merge": [],
            "hours_to_close": [],
            "repos": set(),
            "prs": [],
        }
    )

    repo_stats: Dict[str, Dict[str, Any]] = {}

    for repo in repos:
        print(f"Fetching PRs for {repo} ...", file=sys.stderr)
        try:
            prs = fetch_prs(repo)
        except requests.HTTPError as exc:
            print(f"  SKIP {repo}: {exc}", file=sys.stderr)
            continue

        repo_merged = 0
        repo_total = len(prs)
        for pr in prs:
            agent = AgentClassifier.classify(pr)
            s = agent_stats[agent]
            s["total"] += 1
            s["repos"].add(repo)
            s["prs"].append(
                {
                    "repo": repo,
                    "number": pr["number"],
                    "title": pr["title"],
                    "state": pr["state"],
                    "merged": pr.get("merged", False),
                    "created_at": pr.get("created_at"),
                    "merged_at": pr.get("merged_at"),
                    "closed_at": pr.get("closed_at"),
                }
            )

            if pr.get("merged"):
                s["merged"] += 1
                repo_merged += 1
                h = hours_between(pr.get("created_at"), pr.get("merged_at"))
                if h is not None:
                    s["hours_to_merge"].append(h)
            elif pr["state"] == "closed":
                s["closed_unmerged"] += 1
                h = hours_between(pr.get("created_at"), pr.get("closed_at"))
                if h is not None:
                    s["hours_to_close"].append(h)
            else:
                s["open"] += 1

        repo_stats[repo] = {
            "total": repo_total,
            "merged": repo_merged,
            "merge_rate": round(repo_merged / repo_total, 2) if repo_total else 0,
        }

    # Compute derived metrics
    summary = {}
    for agent, s in sorted(agent_stats.items(), key=lambda x: -x[1]["total"]):
        total = s["total"]
        merged = s["merged"]
        closed = s["closed_unmerged"]
        resolved = merged + closed
        merge_rate = round(merged / resolved, 3) if resolved else 0
        avg_merge_hours = (
            round(sum(s["hours_to_merge"]) / len(s["hours_to_merge"]), 1)
            if s["hours_to_merge"]
            else None
        )
        avg_close_hours = (
            round(sum(s["hours_to_close"]) / len(s["hours_to_close"]), 1)
            if s["hours_to_close"]
            else None
        )
        summary[agent] = {
            "total_prs": total,
            "merged": merged,
            "closed_unmerged": closed,
            "open": s["open"],
            "merge_rate": merge_rate,
            "rejection_rate": round(closed / resolved, 3) if resolved else 0,
            "avg_hours_to_merge": avg_merge_hours,
            "avg_hours_to_close": avg_close_hours,
            "repos": sorted(s["repos"]),
        }

    return {
        "audited_at": datetime.now(timezone.utc).isoformat(),
        "repos_audited": repos,
        "repo_stats": repo_stats,
        "agent_summary": summary,
        "raw_prs": {a: s["prs"] for a, s in agent_stats.items()},
    }


def render_scorecard(data: Dict[str, Any]) -> str:
    """Render a markdown scorecard."""
    lines = [
        "# Cross-Agent Quality Scorecard",
        "",
        f"**Audited at:** {data['audited_at']}",
        f"**Repos audited:** {', '.join(data['repos_audited'])}",
        "",
        "## Per-Agent Summary",
        "",
        "| Agent | Total PRs | Merged | Closed (unmerged) | Open | Merge Rate | Rejection Rate | Avg Hours to Merge | Avg Hours to Close |",
        "|---|---|---:|---:|---:|---:|---:|---:|---:|",
    ]

    for agent, s in data["agent_summary"].items():
        merge_hours = f"{s['avg_hours_to_merge']:.1f}" if s["avg_hours_to_merge"] is not None else "—"
        close_hours = f"{s['avg_hours_to_close']:.1f}" if s["avg_hours_to_close"] is not None else "—"
        lines.append(
            f"| {agent} | {s['total_prs']} | {s['merged']} | {s['closed_unmerged']} | "
            f"{s['open']} | {s['merge_rate']:.1%} | {s['rejection_rate']:.1%} | "
            f"{merge_hours} | {close_hours} |"
        )

    lines.extend([
        "",
        "## Per-Repo Merge Rate",
        "",
        "| Repo | Total PRs | Merged | Merge Rate |",
        "|---|---|---:|---:|",
    ])

    for repo, s in sorted(data["repo_stats"].items(), key=lambda x: -x[1]["total"]):
        lines.append(
            f"| {repo} | {s['total']} | {s['merged']} | {s['merge_rate']:.1%} |"
        )

    lines.extend([
        "",
        "## Methodology",
        "",
        "- **Agent classification** uses three signals in priority order:",
        "  1. Explicit title tag (e.g. `[Claude]`, `[Ezra]`)",
        "  2. Branch name containing agent name (e.g. `claude/issue-123`)",
        "  3. Git user (`claude` → claude, `Rockachopa` → burn-loop)",
        "- **Merge rate** = merged / (merged + closed_unmerged). Open PRs are excluded.",
        "- **Rejection rate** = closed_unmerged / (merged + closed_unmerged).",
        "- **Time metrics** are computed from created_at to merged_at / closed_at.",
        "",
        "## Raw Data",
        "",
        "```json",
        json.dumps(data["agent_summary"], indent=2),
        "```",
        "",
    ])

    return "\n".join(lines) + "\n"


def main() -> int:
    parser = argparse.ArgumentParser(description="Cross-agent quality audit")
    parser.add_argument("--repos", nargs="+", default=DEFAULT_REPOS, help="Repos to audit")
    parser.add_argument("--scorecard", default="timmy-config/agent-quality-scorecard.md", help="Output path")
    parser.add_argument("--json", default=None, help="Also write raw JSON to path")
    args = parser.parse_args()

    data = audit_repos(args.repos)

    scorecard_path = Path(args.scorecard)
    scorecard_path.parent.mkdir(parents=True, exist_ok=True)
    scorecard_path.write_text(render_scorecard(data))
    print(f"Scorecard written to {scorecard_path}", file=sys.stderr)

    if args.json:
        json_path = Path(args.json)
        json_path.parent.mkdir(parents=True, exist_ok=True)
        json_path.write_text(json.dumps(data, indent=2, default=str))
        print(f"Raw JSON written to {json_path}", file=sys.stderr)

    return 0


if __name__ == "__main__":
    raise SystemExit(main())

@@ -1,176 +0,0 @@
from __future__ import annotations

import json
import subprocess
import sys
from datetime import date
from pathlib import Path

from scripts.burn_velocity_tracker import build_velocity_report, render_markdown, update_history


ROOT = Path(__file__).resolve().parent.parent
DOC_PATH = ROOT / "docs" / "BURN_VELOCITY_TRACKING.md"


SNAPSHOT = {
    "generated_at": "2026-04-22T12:00:00Z",
    "repos": {
        "timmy-home": {
            "open_issues": [
                {"number": 501, "state": "open", "created_at": "2026-04-20T09:00:00Z"},
                {"number": 502, "state": "open", "created_at": "2026-04-22T07:00:00Z"},
            ],
            "recent_issues": [
                {"number": 401, "state": "closed", "created_at": "2026-04-21T09:00:00Z", "closed_at": "2026-04-22T05:30:00Z"},
                {"number": 402, "state": "closed", "created_at": "2026-04-20T09:00:00Z", "closed_at": "2026-04-21T05:30:00Z"},
                {"number": 403, "state": "closed", "created_at": "2026-04-19T09:00:00Z", "closed_at": "2026-04-20T05:30:00Z"},
                {"number": 404, "state": "closed", "created_at": "2026-04-14T09:00:00Z", "closed_at": "2026-04-15T05:30:00Z"},
                {"number": 405, "state": "closed", "created_at": "2026-04-13T09:00:00Z", "closed_at": "2026-04-14T05:30:00Z"},
                {"number": 406, "state": "closed", "created_at": "2026-04-12T09:00:00Z", "closed_at": "2026-04-13T05:30:00Z"},
                {"number": 407, "state": "closed", "created_at": "2026-04-11T09:00:00Z", "closed_at": "2026-04-12T05:30:00Z"},
                {"number": 408, "state": "closed", "created_at": "2026-04-10T09:00:00Z", "closed_at": "2026-04-11T05:30:00Z"},
                {"number": 409, "state": "closed", "created_at": "2026-04-09T09:00:00Z", "closed_at": "2026-04-10T05:30:00Z"},
                {"number": 410, "state": "closed", "created_at": "2026-04-08T09:00:00Z", "closed_at": "2026-04-09T05:30:00Z"},
                {"number": 411, "state": "closed", "created_at": "2026-04-07T09:00:00Z", "closed_at": "2026-04-08T05:30:00Z"},
                {"number": 412, "state": "closed", "created_at": "2026-04-06T09:00:00Z", "closed_at": "2026-04-07T05:30:00Z"},
                {"number": 413, "state": "closed", "created_at": "2026-04-05T09:00:00Z", "closed_at": "2026-04-06T05:30:00Z"},
                {"number": 414, "state": "open", "created_at": "2026-04-22T08:45:00Z", "closed_at": None},
                {"number": 415, "state": "open", "created_at": "2026-04-17T08:45:00Z", "closed_at": None},
            ],
        },
        "timmy-config": {
            "open_issues": [
                {"number": 601, "state": "open", "created_at": "2026-04-18T09:00:00Z"},
            ],
            "recent_issues": [
                {"number": 602, "state": "closed", "created_at": "2026-04-20T09:00:00Z", "closed_at": "2026-04-21T06:00:00Z"},
                {"number": 603, "state": "open", "created_at": "2026-04-22T06:00:00Z", "closed_at": None},
            ],
        },
    },
}


CONFIG = {
    "owner": "Timmy_Foundation",
    "repos": ["timmy-home", "timmy-config"],
    "lookback_days": 14,
    "alert": {
        "recent_days": 7,
        "baseline_days": 7,
        "minimum_baseline_closed": 4,
        "drop_ratio": 0.6,
    },
}


def test_build_velocity_report_counts_opened_closed_and_flags_drop_alert() -> None:
    report = build_velocity_report(CONFIG, SNAPSHOT, today=date(2026, 4, 22))

    assert report["generated_day"] == "2026-04-22"
    assert report["summary"]["repos_with_alerts"] == ["timmy-home"]
    assert report["summary"]["total_open_now"] == 3

    home = report["repos"][0]
    assert home["repo"] == "timmy-home"
    assert home["open_now"] == 2
    assert home["opened_last_7d"] == 5
    assert home["closed_last_7d"] == 3
    assert home["baseline_closed"] == 7
    assert home["weekly_net"] == 2
    assert home["alert"]["status"] == "drop"
    assert home["alert"]["recent_closed"] == 3
    assert home["daily"][-1] == {"date": "2026-04-22", "opened": 1, "closed": 1}

    timmy_config = report["repos"][1]
    assert timmy_config["repo"] == "timmy-config"
    assert timmy_config["open_now"] == 1
    assert timmy_config["closed_last_7d"] == 1
    assert timmy_config["alert"]["status"] == "ok"


def test_render_markdown_includes_dashboard_handoff_and_alerts() -> None:
    report = build_velocity_report(CONFIG, SNAPSHOT, today=date(2026, 4, 22))
    rendered = render_markdown(report)

    for snippet in (
        "# Burn-down Velocity Tracking",
        "## Per-repo velocity",
        "timmy-home",
        "timmy-config",
        "## Dashboard handoff for timmy-config",
        "velocity_drop",
        "## Alerts",
    ):
        assert snippet in rendered


def test_update_history_replaces_same_day_snapshot(tmp_path: Path) -> None:
    history_path = tmp_path / "burn-velocity-history.json"
    report = build_velocity_report(CONFIG, SNAPSHOT, today=date(2026, 4, 22))
    update_history(history_path, report)

    updated = json.loads(json.dumps(report))
    updated["repos"][0]["open_now"] = 9
    updated["summary"]["total_open_now"] = 10
    update_history(history_path, updated)

    history = json.loads(history_path.read_text(encoding="utf-8"))
    assert [item["date"] for item in history["days"]] == ["2026-04-22"]
    assert history["days"][0]["summary"]["total_open_now"] == 10
    assert history["days"][0]["repos"][0]["open_now"] == 9


def test_cli_writes_json_markdown_and_history_from_snapshot(tmp_path: Path) -> None:
    snapshot_path = tmp_path / "snapshot.json"
    output_json = tmp_path / "latest.json"
    output_md = tmp_path / "latest.md"
    history_path = tmp_path / "history.json"
    snapshot_path.write_text(json.dumps(SNAPSHOT), encoding="utf-8")

    result = subprocess.run(
        [
            sys.executable,
            "-m",
            "scripts.burn_velocity_tracker",
            "--snapshot-file",
            str(snapshot_path),
            "--today",
            "2026-04-22",
            "--output-json",
            str(output_json),
            "--output-md",
            str(output_md),
            "--history-file",
            str(history_path),
            "--write-history",
            "--json",
        ],
        check=True,
        cwd=ROOT,
        capture_output=True,
        text=True,
    )

    payload = json.loads(result.stdout)
    assert payload["summary"]["repos_with_alerts"] == ["timmy-home"]
    assert output_json.exists()
    assert output_md.exists()
    assert history_path.exists()
    assert "timmy-config" in output_md.read_text(encoding="utf-8")


def test_repo_contains_burn_velocity_tracking_doc() -> None:
    text = DOC_PATH.read_text(encoding="utf-8")
    required = [
        "# Burn-down Velocity Tracking",
        "python3 scripts/burn_velocity_tracker.py",
        "configs/burn_velocity_repos.json",
        "~/.timmy/burn-velocity/latest.json",
        "timmy-config dashboard",
        "type=issues",
        "velocity_drop",
    ]
    for snippet in required:
        assert snippet in text

tests/test_cross_agent_quality_audit.py (new file, 45 lines)
@@ -0,0 +1,45 @@
"""Tests for cross_agent_quality_audit.py — #518."""

import pytest
import sys
from pathlib import Path

sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))

from cross_agent_quality_audit import AgentClassifier, hours_between


class TestAgentClassifier:
    def test_title_tag_claude(self):
        pr = {"title": "[Claude] fix auth middleware", "head": {"ref": "fix/123"}, "user": {"login": "rockachopa"}}
        assert AgentClassifier.classify(pr) == "claude"

    def test_title_tag_ezra(self):
        pr = {"title": "[Ezra] tmux fleet launcher", "head": {"ref": "burn/10"}, "user": {"login": "rockachopa"}}
        assert AgentClassifier.classify(pr) == "ezra"

    def test_branch_name_claude(self):
        pr = {"title": "fix auth", "head": {"ref": "claude/issue-1695"}, "user": {"login": "rockachopa"}}
        assert AgentClassifier.classify(pr) == "claude"

    def test_user_mapping(self):
        pr = {"title": "some fix", "head": {"ref": "fix/1"}, "user": {"login": "claude"}}
        assert AgentClassifier.classify(pr) == "claude"

    def test_rockachopa_maps_to_burn_loop(self):
        pr = {"title": "some fix", "head": {"ref": "fix/1"}, "user": {"login": "Rockachopa"}}
        assert AgentClassifier.classify(pr) == "burn-loop"

    def test_unknown_fallback(self):
        pr = {"title": "some fix", "head": {"ref": "fix/1"}, "user": {"login": "random"}}
        assert AgentClassifier.classify(pr) == "unknown"


class TestHoursBetween:
    def test_same_day(self):
        h = hours_between("2026-04-22T10:00:00Z", "2026-04-22T12:00:00Z")
        assert h == 2.0

    def test_none_returns_none(self):
        assert hours_between(None, "2026-04-22T12:00:00Z") is None
        assert hours_between("2026-04-22T10:00:00Z", None) is None

timmy-config/agent-quality-scorecard.md (new file, 244 lines)
@@ -0,0 +1,244 @@
# Cross-Agent Quality Scorecard

**Audited at:** 2026-04-22T06:17:43.574309+00:00
**Repos audited:** timmy-home, hermes-agent, the-nexus, the-door, fleet-ops, burn-fleet, the-playground, compounding-intelligence, the-beacon, second-son-of-timmy, timmy-academy, timmy-config

## Per-Agent Summary

| Agent | Total PRs | Merged | Closed (unmerged) | Open | Merge Rate | Rejection Rate | Avg Hours to Merge | Avg Hours to Close |
|---|---|---:|---:|---:|---:|---:|---:|---:|
| burn-loop | 1733 | 346 | 1239 | 148 | 21.8% | 78.2% | 18.9 | 20.6 |
| unknown | 843 | 598 | 214 | 31 | 73.6% | 26.4% | 2.3 | 11.3 |
| claude | 264 | 138 | 121 | 5 | 53.3% | 46.7% | 3.3 | 6.2 |
| gemini | 95 | 24 | 70 | 1 | 25.5% | 74.5% | 0.5 | 11.3 |
| timmy | 28 | 15 | 11 | 2 | 57.7% | 42.3% | 9.8 | 20.2 |
| bezalel | 21 | 11 | 9 | 1 | 55.0% | 45.0% | 2.7 | 8.0 |
| allegro | 21 | 7 | 11 | 3 | 38.9% | 61.1% | 31.1 | 20.2 |
| ezra | 8 | 2 | 3 | 3 | 40.0% | 60.0% | 4.4 | 16.8 |
| kimi | 6 | 3 | 3 | 0 | 50.0% | 50.0% | 39.5 | 0.5 |
| manus | 6 | 5 | 1 | 0 | 83.3% | 16.7% | 0.0 | 18.8 |
| codex | 2 | 2 | 0 | 0 | 100.0% | 0.0% | 2.3 | — |

## Per-Repo Merge Rate

| Repo | Total PRs | Merged | Merge Rate |
|---|---|---:|---:|
| the-nexus | 985 | 501 | 51.0% |
| hermes-agent | 519 | 128 | 25.0% |
| timmy-config | 404 | 140 | 35.0% |
| timmy-home | 270 | 104 | 39.0% |
| fleet-ops | 266 | 84 | 32.0% |
| the-beacon | 175 | 62 | 35.0% |
| the-door | 153 | 31 | 20.0% |
| second-son-of-timmy | 111 | 82 | 74.0% |
| compounding-intelligence | 50 | 9 | 18.0% |
| the-playground | 44 | 2 | 5.0% |
| burn-fleet | 38 | 2 | 5.0% |
| timmy-academy | 12 | 6 | 50.0% |

## Methodology

- **Agent classification** uses three signals in priority order:
  1. Explicit title tag (e.g. `[Claude]`, `[Ezra]`)
  2. Branch name containing agent name (e.g. `claude/issue-123`)
  3. Git user (`claude` → claude, `Rockachopa` → burn-loop)
- **Merge rate** = merged / (merged + closed_unmerged). Open PRs are excluded (worked check after this list).
- **Rejection rate** = closed_unmerged / (merged + closed_unmerged).
- **Time metrics** are computed from created_at to merged_at / closed_at.
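
As a worked check of the formulas, the burn-loop row above (numbers taken from the table; the snippet itself is only illustrative):

```python
# Worked check: burn-loop row from the Per-Agent Summary table.
merged, closed_unmerged = 346, 1239
resolved = merged + closed_unmerged                     # 1585 resolved PRs
merge_rate = round(merged / resolved, 3)                # 0.218 -> 21.8%
rejection_rate = round(closed_unmerged / resolved, 3)   # 0.782 -> 78.2%
assert merge_rate == 0.218 and rejection_rate == 0.782
```
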
## Raw Data

```json
{
  "burn-loop": {
    "total_prs": 1733,
    "merged": 346,
    "closed_unmerged": 1239,
    "open": 148,
    "merge_rate": 0.218,
    "rejection_rate": 0.782,
    "avg_hours_to_merge": 18.9,
    "avg_hours_to_close": 20.6,
    "repos": [
      "burn-fleet",
      "compounding-intelligence",
      "fleet-ops",
      "hermes-agent",
      "second-son-of-timmy",
      "the-beacon",
      "the-door",
      "the-nexus",
      "the-playground",
      "timmy-academy",
      "timmy-config",
      "timmy-home"
    ]
  },
  "unknown": {
    "total_prs": 843,
    "merged": 598,
    "closed_unmerged": 214,
    "open": 31,
    "merge_rate": 0.736,
    "rejection_rate": 0.264,
    "avg_hours_to_merge": 2.3,
    "avg_hours_to_close": 11.3,
    "repos": [
      "fleet-ops",
      "hermes-agent",
      "second-son-of-timmy",
      "the-beacon",
      "the-door",
      "the-nexus",
      "timmy-academy",
      "timmy-config",
      "timmy-home"
    ]
  },
  "claude": {
    "total_prs": 264,
    "merged": 138,
    "closed_unmerged": 121,
    "open": 5,
    "merge_rate": 0.533,
    "rejection_rate": 0.467,
    "avg_hours_to_merge": 3.3,
    "avg_hours_to_close": 6.2,
    "repos": [
      "hermes-agent",
      "the-nexus",
      "timmy-config",
      "timmy-home"
    ]
  },
  "gemini": {
    "total_prs": 95,
    "merged": 24,
    "closed_unmerged": 70,
    "open": 1,
    "merge_rate": 0.255,
    "rejection_rate": 0.745,
    "avg_hours_to_merge": 0.5,
    "avg_hours_to_close": 11.3,
    "repos": [
      "hermes-agent",
      "the-nexus",
      "timmy-config",
      "timmy-home"
    ]
  },
  "timmy": {
    "total_prs": 28,
    "merged": 15,
    "closed_unmerged": 11,
    "open": 2,
    "merge_rate": 0.577,
    "rejection_rate": 0.423,
    "avg_hours_to_merge": 9.8,
    "avg_hours_to_close": 20.2,
    "repos": [
      "burn-fleet",
      "hermes-agent",
      "the-nexus",
      "timmy-config",
      "timmy-home"
    ]
  },
  "bezalel": {
    "total_prs": 21,
    "merged": 11,
    "closed_unmerged": 9,
    "open": 1,
    "merge_rate": 0.55,
    "rejection_rate": 0.45,
    "avg_hours_to_merge": 2.7,
    "avg_hours_to_close": 8.0,
    "repos": [
      "burn-fleet",
      "hermes-agent",
      "the-beacon",
      "the-nexus",
      "timmy-config",
      "timmy-home"
    ]
  },
  "allegro": {
    "total_prs": 21,
    "merged": 7,
    "closed_unmerged": 11,
    "open": 3,
    "merge_rate": 0.389,
    "rejection_rate": 0.611,
    "avg_hours_to_merge": 31.1,
    "avg_hours_to_close": 20.2,
    "repos": [
      "burn-fleet",
      "hermes-agent",
      "the-beacon",
      "the-nexus",
      "timmy-config",
      "timmy-home"
    ]
  },
  "ezra": {
    "total_prs": 8,
    "merged": 2,
    "closed_unmerged": 3,
    "open": 3,
    "merge_rate": 0.4,
    "rejection_rate": 0.6,
    "avg_hours_to_merge": 4.4,
    "avg_hours_to_close": 16.8,
    "repos": [
      "burn-fleet",
      "fleet-ops",
      "timmy-config",
      "timmy-home"
    ]
  },
  "kimi": {
    "total_prs": 6,
    "merged": 3,
    "closed_unmerged": 3,
    "open": 0,
    "merge_rate": 0.5,
    "rejection_rate": 0.5,
    "avg_hours_to_merge": 39.5,
    "avg_hours_to_close": 0.5,
    "repos": [
      "hermes-agent",
      "the-nexus",
      "timmy-home"
    ]
  },
  "manus": {
    "total_prs": 6,
    "merged": 5,
    "closed_unmerged": 1,
    "open": 0,
    "merge_rate": 0.833,
    "rejection_rate": 0.167,
    "avg_hours_to_merge": 0.0,
    "avg_hours_to_close": 18.8,
    "repos": [
      "the-nexus",
      "timmy-config"
    ]
  },
  "codex": {
    "total_prs": 2,
    "merged": 2,
    "closed_unmerged": 0,
    "open": 0,
    "merge_rate": 1.0,
    "rejection_rate": 0.0,
    "avg_hours_to_merge": 2.3,
    "avg_hours_to_close": null,
    "repos": [
      "timmy-config",
      "timmy-home"
    ]
  }
}
```