Compare commits
1 Commits
step35/459
...
fix/518
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3f45cae90a |
@@ -169,14 +169,6 @@ _config_version: 9
|
||||
session_reset:
|
||||
mode: none
|
||||
idle_minutes: 0
|
||||
blackboard:
|
||||
enabled: true
|
||||
redis:
|
||||
url: redis://localhost:6379/0
|
||||
password: ""
|
||||
keyspace_prefix: timmy
|
||||
ttl_seconds: 3600
|
||||
fallback_to_memory: true
|
||||
custom_providers:
|
||||
- name: Local Ollama
|
||||
base_url: http://localhost:11434/v1
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
# Local Redis Blackboard for Agent Coordination
|
||||
|
||||
This directory contains the Redis deployment for the Timmy Home "Blackboard" — a
|
||||
shared coordination layer for multi-agent orchestration.
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
Redis will be available at `redis://localhost:6379` with persistence enabled.
|
||||
|
||||
## Stop
|
||||
|
||||
```bash
|
||||
docker-compose down # Stop, keep data
|
||||
docker-compose down -v # Stop and delete data
|
||||
```
|
||||
@@ -1,18 +0,0 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
redis:
|
||||
image: redis:7-alpine
|
||||
container_name: timmy-redis
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "6379:6379"
|
||||
volumes:
|
||||
- ./data:/data
|
||||
command: ["redis-server", "--appendonly", "yes"]
|
||||
networks:
|
||||
- timmy-network
|
||||
|
||||
networks:
|
||||
timmy-network:
|
||||
driver: bridge
|
||||
313
scripts/cross_agent_quality_audit.py
Normal file
313
scripts/cross_agent_quality_audit.py
Normal file
@@ -0,0 +1,313 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Cross-agent quality audit — #518
|
||||
|
||||
Fetches all PRs across Timmy_Foundation repos, classifies by agent,
|
||||
and produces a merge-rate scorecard.
|
||||
|
||||
Usage:
|
||||
python scripts/cross_agent_quality_audit.py
|
||||
python scripts/cross_agent_quality_audit.py --scorecard timmy-config/agent-quality-scorecard.md
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
import requests
|
||||
|
||||
GITEA_BASE = "https://forge.alexanderwhitestone.com/api/v1"
ORG = "Timmy_Foundation"


def _load_token() -> str:
    """Resolve the Gitea API token.

    Order of precedence: the GITEA_TOKEN environment variable, then
    ~/.config/gitea/token. Returns an empty string when neither is
    available so that merely importing this module (e.g. from the test
    suite, or to show --help) never crashes with FileNotFoundError.
    """
    env_token = os.environ.get("GITEA_TOKEN")
    if env_token:
        return env_token
    token_file = Path.home() / ".config" / "gitea" / "token"
    try:
        return token_file.read_text().strip()
    except OSError:
        # Missing/unreadable token file: API calls will fail later with a
        # clear 401 instead of the import blowing up here.
        return ""


TOKEN = _load_token()

HEADERS = {"Authorization": f"token {TOKEN}"}

# Repos to audit (active code repos)
DEFAULT_REPOS = [
    "timmy-home",
    "hermes-agent",
    "the-nexus",
    "the-door",
    "fleet-ops",
    "burn-fleet",
    "the-playground",
    "compounding-intelligence",
    "the-beacon",
    "second-son-of-timmy",
    "timmy-academy",
    "timmy-config",
]
|
||||
|
||||
|
||||
class AgentClassifier:
    """Attribute each PR to the agent that authored it.

    Three signals are consulted in priority order: an explicit agent tag
    in the PR title, an agent name embedded in the head branch, and
    finally the Gitea user login.
    """

    # Known agent names, shared by the title and branch patterns.
    _AGENTS = "Claude|Ezra|Allegro|Bezalel|Timmy|Gemini|Kimi|Manus|Codex"

    # PR title prefix that explicitly names an agent, e.g. "[Claude] fix auth"
    AGENT_TITLE_RE = re.compile(rf"^\[(?P<agent>{_AGENTS})\]", re.IGNORECASE)

    # Agent name anywhere in a branch, e.g. "claude/issue-123"
    AGENT_BRANCH_RE = re.compile(rf"(?P<agent>{_AGENTS})", re.IGNORECASE)

    @classmethod
    def classify(cls, pr: Dict[str, Any]) -> str:
        """Return the lower-cased agent name for *pr*, or "unknown"."""
        # 1. Explicit title tag like [Claude] or [Ezra]
        title_hit = cls.AGENT_TITLE_RE.match(pr.get("title", ""))
        if title_hit:
            return title_hit.group("agent").lower()

        # 2. Branch contains an agent name (e.g. claude/issue-123)
        branch_hit = cls.AGENT_BRANCH_RE.search(pr.get("head", {}).get("ref", ""))
        if branch_hit:
            return branch_hit.group("agent").lower()

        # 3. Git user mapping
        login = pr.get("user", {}).get("login", "").lower()
        if login == "claude":
            return "claude"
        if login == "rockachopa":
            # Rockachopa is the human / orchestrator — map to "burn-loop"
            return "burn-loop"

        return "unknown"
|
||||
|
||||
|
||||
def fetch_prs(repo: str, state: str = "all", per_page: int = 50) -> List[Dict[str, Any]]:
    """Collect every PR for *repo* by walking Gitea's paginated endpoint.

    Raises requests.HTTPError on a non-2xx response.
    """
    collected: List[Dict[str, Any]] = []
    page = 1
    while True:
        url = (
            f"{GITEA_BASE}/repos/{ORG}/{repo}/pulls"
            f"?state={state}&limit={per_page}&page={page}"
        )
        response = requests.get(url, headers=HEADERS, timeout=30)
        response.raise_for_status()
        batch = response.json()
        if not batch:
            break
        collected.extend(batch)
        if len(batch) < per_page:
            # A short page is the last page; skip the extra empty request.
            break
        page += 1
    return collected
|
||||
|
||||
|
||||
def parse_datetime(dt_str: Optional[str]) -> Optional[datetime]:
    """Parse an ISO-8601 timestamp (Gitea's trailing 'Z' is accepted).

    Returns None for empty/None input or an unparseable string.
    """
    if not dt_str:
        return None
    try:
        return datetime.fromisoformat(dt_str.replace("Z", "+00:00"))
    except ValueError:
        return None


def hours_between(start: Optional[str], end: Optional[str]) -> Optional[float]:
    """Elapsed hours from *start* to *end*, or None if either is unparseable."""
    begin = parse_datetime(start)
    finish = parse_datetime(end)
    if begin is None or finish is None:
        return None
    return (finish - begin).total_seconds() / 3600
|
||||
|
||||
|
||||
def audit_repos(repos: List[str]) -> Dict[str, Any]:
    """Fetch PRs for every repo and aggregate per-agent / per-repo statistics.

    Repos whose PR listing fails with an HTTP error are skipped (with a
    note on stderr) rather than aborting the whole audit.
    """

    def _new_bucket() -> Dict[str, Any]:
        # Accumulator for one agent's PRs across all repos.
        return {
            "total": 0,
            "merged": 0,
            "closed_unmerged": 0,
            "open": 0,
            "hours_to_merge": [],
            "hours_to_close": [],
            "repos": set(),
            "prs": [],
        }

    agent_stats: Dict[str, Dict[str, Any]] = defaultdict(_new_bucket)
    repo_stats: Dict[str, Dict[str, Any]] = {}

    for repo in repos:
        print(f"Fetching PRs for {repo} ...", file=sys.stderr)
        try:
            prs = fetch_prs(repo)
        except requests.HTTPError as exc:
            print(f" SKIP {repo}: {exc}", file=sys.stderr)
            continue

        repo_merged = 0
        for pr in prs:
            agent = AgentClassifier.classify(pr)
            bucket = agent_stats[agent]
            bucket["total"] += 1
            bucket["repos"].add(repo)
            bucket["prs"].append(
                {
                    "repo": repo,
                    "number": pr["number"],
                    "title": pr["title"],
                    "state": pr["state"],
                    "merged": pr.get("merged", False),
                    "created_at": pr.get("created_at"),
                    "merged_at": pr.get("merged_at"),
                    "closed_at": pr.get("closed_at"),
                }
            )

            if pr.get("merged"):
                bucket["merged"] += 1
                repo_merged += 1
                elapsed = hours_between(pr.get("created_at"), pr.get("merged_at"))
                if elapsed is not None:
                    bucket["hours_to_merge"].append(elapsed)
            elif pr["state"] == "closed":
                bucket["closed_unmerged"] += 1
                elapsed = hours_between(pr.get("created_at"), pr.get("closed_at"))
                if elapsed is not None:
                    bucket["hours_to_close"].append(elapsed)
            else:
                bucket["open"] += 1

        repo_total = len(prs)
        repo_stats[repo] = {
            "total": repo_total,
            "merged": repo_merged,
            "merge_rate": round(repo_merged / repo_total, 2) if repo_total else 0,
        }

    def _avg(samples: List[float]) -> Optional[float]:
        # Mean rounded to 0.1h; None when there were no samples.
        return round(sum(samples) / len(samples), 1) if samples else None

    # Derived per-agent metrics, largest agents first.
    summary: Dict[str, Any] = {}
    for agent, bucket in sorted(agent_stats.items(), key=lambda item: -item[1]["total"]):
        merged = bucket["merged"]
        closed = bucket["closed_unmerged"]
        resolved = merged + closed  # open PRs are excluded from rates
        summary[agent] = {
            "total_prs": bucket["total"],
            "merged": merged,
            "closed_unmerged": closed,
            "open": bucket["open"],
            "merge_rate": round(merged / resolved, 3) if resolved else 0,
            "rejection_rate": round(closed / resolved, 3) if resolved else 0,
            "avg_hours_to_merge": _avg(bucket["hours_to_merge"]),
            "avg_hours_to_close": _avg(bucket["hours_to_close"]),
            "repos": sorted(bucket["repos"]),
        }

    return {
        "audited_at": datetime.now(timezone.utc).isoformat(),
        "repos_audited": repos,
        "repo_stats": repo_stats,
        "agent_summary": summary,
        "raw_prs": {agent: bucket["prs"] for agent, bucket in agent_stats.items()},
    }
|
||||
|
||||
|
||||
def render_scorecard(data: Dict[str, Any]) -> str:
    """Render the audit result dict as a markdown scorecard document."""
    out: List[str] = [
        "# Cross-Agent Quality Scorecard",
        "",
        f"**Audited at:** {data['audited_at']}",
        f"**Repos audited:** {', '.join(data['repos_audited'])}",
        "",
        "## Per-Agent Summary",
        "",
        "| Agent | Total PRs | Merged | Closed (unmerged) | Open | Merge Rate | Rejection Rate | Avg Hours to Merge | Avg Hours to Close |",
        "|---|---|---:|---:|---:|---:|---:|---:|---:|",
    ]

    def _fmt_hours(value) -> str:
        # Em-dash for "no data", otherwise one decimal place.
        return f"{value:.1f}" if value is not None else "—"

    for agent, stats in data["agent_summary"].items():
        out.append(
            f"| {agent} | {stats['total_prs']} | {stats['merged']} | {stats['closed_unmerged']} | "
            f"{stats['open']} | {stats['merge_rate']:.1%} | {stats['rejection_rate']:.1%} | "
            f"{_fmt_hours(stats['avg_hours_to_merge'])} | {_fmt_hours(stats['avg_hours_to_close'])} |"
        )

    out.extend([
        "",
        "## Per-Repo Merge Rate",
        "",
        "| Repo | Total PRs | Merged | Merge Rate |",
        "|---|---|---:|---:|",
    ])

    # Busiest repos first.
    for repo, stats in sorted(data["repo_stats"].items(), key=lambda item: -item[1]["total"]):
        out.append(
            f"| {repo} | {stats['total']} | {stats['merged']} | {stats['merge_rate']:.1%} |"
        )

    out.extend([
        "",
        "## Methodology",
        "",
        "- **Agent classification** uses three signals in priority order:",
        "  1. Explicit title tag (e.g. `[Claude]`, `[Ezra]`)",
        "  2. Branch name containing agent name (e.g. `claude/issue-123`)",
        "  3. Git user (`claude` → claude, `Rockachopa` → burn-loop)",
        "- **Merge rate** = merged / (merged + closed_unmerged). Open PRs are excluded.",
        "- **Rejection rate** = closed_unmerged / (merged + closed_unmerged).",
        "- **Time metrics** are computed from created_at to merged_at / closed_at.",
        "",
        "## Raw Data",
        "",
        "```json",
        json.dumps(data["agent_summary"], indent=2),
        "```",
        "",
    ])

    return "\n".join(out) + "\n"
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: run the audit, write the scorecard and optional JSON.

    Returns 0 on success (exit status).
    """
    parser = argparse.ArgumentParser(description="Cross-agent quality audit")
    parser.add_argument("--repos", nargs="+", default=DEFAULT_REPOS, help="Repos to audit")
    parser.add_argument("--scorecard", default="timmy-config/agent-quality-scorecard.md", help="Output path")
    parser.add_argument("--json", default=None, help="Also write raw JSON to path")
    opts = parser.parse_args()

    results = audit_repos(opts.repos)

    scorecard_path = Path(opts.scorecard)
    scorecard_path.parent.mkdir(parents=True, exist_ok=True)
    scorecard_path.write_text(render_scorecard(results))
    print(f"Scorecard written to {scorecard_path}", file=sys.stderr)

    if opts.json:
        json_path = Path(opts.json)
        json_path.parent.mkdir(parents=True, exist_ok=True)
        # default=str keeps non-JSON types (e.g. the repo sets) serializable
        json_path.write_text(json.dumps(results, indent=2, default=str))
        print(f"Raw JSON written to {json_path}", file=sys.stderr)

    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
|
||||
@@ -1,311 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Blackboard — Redis-backed shared coordination layer.
|
||||
|
||||
Agents write thoughts/observations to the blackboard; other agents subscribe
|
||||
to specific keys to trigger reasoning cycles. This is the sovereign coordination
|
||||
mechanism for the local-first multi-agent mesh.
|
||||
|
||||
Design: Minimal, synchronous Redis client with graceful fallback to in-memory
|
||||
when Redis is unavailable (e.g., during local dev without Docker).
|
||||
|
||||
SOUL.md: "Sovereignty and service always." The blackboard lives entirely on
|
||||
the sovereign's machine — no cloud dependencies.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
from dataclasses import dataclass, asdict
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, Iterable, Optional
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Lazy import to keep redis optional
|
||||
_redis = None
|
||||
_redis_import_error = None
|
||||
|
||||
try:
|
||||
import redis
|
||||
_redis = redis
|
||||
except ImportError as e:
|
||||
_redis_import_error = e
|
||||
|
||||
|
||||
@dataclass
|
||||
class BlackboardConfig:
|
||||
"""Configuration for the Blackboard."""
|
||||
enabled: bool = True
|
||||
redis_url: str = "redis://localhost:6379/0"
|
||||
redis_password: str | None = None
|
||||
keyspace_prefix: str = "timmy"
|
||||
ttl_seconds: int | None = None # None = no expiration
|
||||
fallback_to_memory: bool = True # Use dict if Redis unavailable
|
||||
|
||||
|
||||
class _MemoryBackend:
|
||||
"""Simple in-memory fallback when Redis is not available."""
|
||||
def __init__(self):
|
||||
self._store: dict[str, str] = {}
|
||||
self._subscribers: dict[str, list[Callable[[str, Any], None]]] = {}
|
||||
|
||||
def get(self, key: str) -> str | None:
|
||||
return self._store.get(key)
|
||||
|
||||
def set(self, key: str, value: str, ttl: int | None = None) -> bool:
|
||||
self._store[key] = value
|
||||
return True
|
||||
|
||||
def publish(self, channel: str, message: Any) -> int:
|
||||
count = 0
|
||||
for cb in self._subscribers.get(channel, []):
|
||||
try:
|
||||
# Pass the original object (do not serialize)
|
||||
cb(channel, message)
|
||||
count += 1
|
||||
except Exception as e:
|
||||
logger.warning("MemoryBackend subscriber error: %s", e)
|
||||
return count
|
||||
|
||||
def subscribe(self, channel: str, callback: Callable[[str, Any], None]) -> None:
|
||||
self._subscribers.setdefault(channel, []).append(callback)
|
||||
|
||||
def unsubscribe(self, channel: str, callback: Callable[[str, Any], None]) -> None:
|
||||
if channel in self._subscribers:
|
||||
self._subscribers[channel].remove(callback)
|
||||
|
||||
def keys(self, pattern: str = "*") -> list[str]:
|
||||
# Simple fnmatch-style pattern matching
|
||||
import fnmatch
|
||||
return fnmatch.filter(list(self._store.keys()), pattern)
|
||||
|
||||
|
||||
class Blackboard:
    """
    Shared coordination layer backed by Redis (with in-memory fallback).

    Agents write state under prefixed keys and exchange events over
    pub/sub channels. When redis-py is missing or the server is down,
    an in-process dict backend keeps the same API working (note: the
    fallback is single-process only).

    Usage:
        bb = Blackboard()
        bb.set("agent:timmy:thought", "checking queue...")
        value = bb.get("agent:timmy:thought")

        def on_event(channel, message):
            print(f"Event on {channel}: {message}")
        bb.subscribe("dispatch:new", on_event)
        bb.publish("dispatch:new", {"issue": 123, "action": "comment"})
    """

    def __init__(self, config: BlackboardConfig | None = None):
        settings = config or BlackboardConfig()
        self.enabled = settings.enabled
        self.prefix = settings.keyspace_prefix
        self.ttl = settings.ttl_seconds
        self._backend: _MemoryBackend | Any

        if not _redis:
            # redis-py was never importable: fall back or fail loudly.
            if not settings.fallback_to_memory:
                raise ImportError("redis-py is required but not installed") from _redis_import_error
            logger.warning(
                "redis-py not installed; using in-memory fallback. "
                "Install with: pip install redis"
            )
            self._backend = _MemoryBackend()
            return

        try:
            client = _redis.from_url(
                settings.redis_url,
                password=settings.redis_password,
                decode_responses=True,
            )
            # Fail fast if the server is unreachable.
            client.ping()
            logger.info("Blackboard connected to Redis at %s", settings.redis_url)
            self._backend = client
        except Exception as e:
            if not settings.fallback_to_memory:
                raise
            logger.warning("Redis connection failed (%s); falling back to in-memory", e)
            self._backend = _MemoryBackend()

    # ─────────────────────────────────────────────
    # Key-value operations
    # ─────────────────────────────────────────────

    def _prefixed(self, key: str) -> str:
        """Namespace *key* with the configured keyspace prefix."""
        return f"{self.prefix}:{key}" if self.prefix else key

    def get(self, key: str) -> str | None:
        """Read a value from the blackboard; None when absent."""
        return self._backend.get(self._prefixed(key))

    def set(self, key: str, value: str | dict, ttl: int | None = None) -> bool:
        """
        Write a value to the blackboard.

        Args:
            key: Key without prefix (prefix is added automatically)
            value: String or JSON-serializable dict
            ttl: Override default TTL (seconds); None = use default

        Returns:
            True on success
        """
        if isinstance(value, dict):
            payload = json.dumps(value, sort_keys=True)
        elif isinstance(value, str):
            payload = value
        else:
            payload = str(value)

        expire = self.ttl if ttl is None else ttl
        return bool(self._backend.set(self._prefixed(key), payload, expire))

    def delete(self, key: str) -> bool:
        """Delete a key; True when something was removed."""
        full_key = self._prefixed(key)
        try:
            return bool(self._backend.delete(full_key))
        except AttributeError:
            # _MemoryBackend has no delete(); drop from its dict directly.
            if full_key in self._backend._store:
                del self._backend._store[full_key]
                return True
            return False

    def keys(self, pattern: str = "*") -> list[str]:
        """List keys matching *pattern*, with the keyspace prefix stripped."""
        raw_keys = self._backend.keys(self._prefixed(pattern))
        strip = len(self.prefix) + 1 if self.prefix else 0
        marker = f"{self.prefix}:"
        return [k[strip:] if k.startswith(marker) else k for k in raw_keys]

    def exists(self, key: str) -> bool:
        """True when the key is present."""
        try:
            return bool(self._backend.exists(self._prefixed(key)))
        except AttributeError:
            # _MemoryBackend has no exists(); check its dict directly.
            return self._prefixed(key) in self._backend._store

    # ─────────────────────────────────────────────
    # Pub/sub operations
    # ─────────────────────────────────────────────

    def publish(self, channel: str, message: Any) -> int:
        """
        Publish a message to a channel.

        Args:
            channel: Channel name (without prefix)
            message: JSON-serializable object or string

        Returns:
            Number of subscribers that received the message
        """
        # Redis needs a string payload; the memory backend takes the object.
        if isinstance(self._backend, _MemoryBackend):
            wire = message
        elif isinstance(message, str):
            wire = message
        else:
            wire = json.dumps(message, sort_keys=True)

        return self._backend.publish(self._prefixed(channel), wire)

    def subscribe(
        self,
        channel: str,
        callback: Callable[[str, Any], None],
        *,
        block: bool = False,
        timeout: float | None = None,
    ) -> None:
        """
        Subscribe to a channel.

        Args:
            channel: Channel name (without prefix)
            callback: Function(channel, message) called for each message
            block: If True, block and listen forever (or until timeout)
            timeout: Max seconds to listen when blocking
        """
        target = self._prefixed(channel)
        pubsub_factory = getattr(self._backend, 'pubsub', None)
        if not callable(pubsub_factory):
            # MemoryBackend — synchronous callback registration.
            self._backend.subscribe(target, callback)
            return

        # Real Redis pub/sub: pump messages on a daemon thread.
        import threading

        pubsub = pubsub_factory()
        pubsub.subscribe(target)

        def _pump():
            for item in pubsub.listen():
                if item['type'] != 'message':
                    continue
                try:
                    decoded = json.loads(item['data'])
                except (json.JSONDecodeError, TypeError):
                    decoded = item['data']
                # Callers see the un-prefixed channel name.
                callback(channel, decoded)

        worker = threading.Thread(target=_pump, daemon=True)
        worker.start()
        if block:
            if timeout:
                worker.join(timeout)
            else:
                worker.join()

    def unsubscribe(self, channel: str, callback: Callable[[str, Any], None]) -> None:
        """Unsubscribe from a channel (effective on the memory backend only)."""
        try:
            self._backend.unsubscribe(self._prefixed(channel), callback)
        except AttributeError:
            # The raw Redis client has no unsubscribe(channel, callback)
            # method — only _MemoryBackend supports callback removal.
            pass

    # ─────────────────────────────────────────────
    # Helpers
    # ─────────────────────────────────────────────

    def clear_namespace(self, pattern: str = "*") -> int:
        """Delete all keys matching pattern in this namespace; returns count."""
        full_pattern = self._prefixed(pattern)
        try:
            matched = self._backend.keys(full_pattern)
            return self._backend.delete(*matched) if matched else 0
        except AttributeError:
            # MemoryBackend path: glob-match against its dict.
            import fnmatch
            doomed = fnmatch.filter(list(self._backend._store.keys()), full_pattern)
            for victim in doomed:
                del self._backend._store[victim]
            return len(doomed)

    def __repr__(self) -> str:
        return f"<Blackboard prefix={self.prefix!r} backend={type(self._backend).__name__}>"
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────
|
||||
# Convenience singleton for global use
|
||||
# ─────────────────────────────────────────────
|
||||
|
||||
_default_blackboard: Blackboard | None = None


def get_blackboard(config: BlackboardConfig | None = None) -> Blackboard:
    """Return the process-wide Blackboard, creating it on first call.

    Note: *config* is only honored on the very first call; subsequent
    calls return the existing singleton regardless of the argument.
    """
    global _default_blackboard
    if _default_blackboard is None:
        _default_blackboard = Blackboard(config)
    return _default_blackboard
|
||||
@@ -1,194 +0,0 @@
|
||||
"""
|
||||
Smoke tests for Blackboard — ensures the Redis-backed coordination layer
|
||||
works with both real Redis and in-memory fallback.
|
||||
"""
|
||||
|
||||
import json
|
||||
import time
|
||||
|
||||
import pytest
|
||||
|
||||
from src.timmy.blackboard import Blackboard, BlackboardConfig, _MemoryBackend
|
||||
|
||||
|
||||
class TestBlackboardBasics:
    """Core key-value operations against the in-memory backend."""

    def test_kv_memory_backend(self):
        """KV operations work using in-memory backend."""
        board = Blackboard(BlackboardConfig(fallback_to_memory=True, enabled=True))

        # Round-trip a plain string.
        assert board.set("test:key", "hello") is True
        assert board.get("test:key") == "hello"

        # Dicts are stored JSON-encoded.
        assert board.set("test:obj", {"a": 1, "b": 2}) is True
        assert json.loads(board.get("test:obj")) == {"a": 1, "b": 2}

        # Existence checks.
        assert board.exists("test:key") is True
        assert board.exists("missing") is False

        # Deletion removes the value.
        assert board.delete("test:key") is True
        assert board.get("test:key") is None

        # Glob listing (prefix is transparent to callers).
        board.set("agent:timmy:state", "ready")
        board.set("agent:ezra:state", "idle")
        matched = board.keys("agent:*:state")
        assert len(matched) == 2
        assert "timmy" in matched[0] or "ezra" in matched[0]

        # Namespace wipe reports how many keys were dropped.
        assert board.clear_namespace("agent:*") == 2
        assert board.keys("agent:*") == []
|
||||
|
||||
|
||||
class TestBlackboardPubSub:
    """Pub/sub coordination patterns on the in-memory backend."""

    def test_pubsub_memory_backend(self):
        """Publish/subscribe works using in-memory backend."""
        board = Blackboard(BlackboardConfig(fallback_to_memory=True, enabled=True))
        inbox = []

        def collector(channel, message):
            inbox.append((channel, message))

        board.subscribe("dispatch:new", collector)

        # One registered subscriber receives the original object.
        assert board.publish("dispatch:new", {"issue": 123, "action": "comment"}) == 1
        assert inbox == [("dispatch:new", {"issue": 123, "action": "comment"})]

        # After unsubscribing, nothing new arrives.
        board.unsubscribe("dispatch:new", collector)
        board.publish("dispatch:new", {"should": "not arrive"})
        assert len(inbox) == 1  # no new messages

    def test_publish_without_subscribers(self):
        """Publish returns 0 when no subscribers."""
        board = Blackboard(BlackboardConfig(fallback_to_memory=True, enabled=True))
        assert board.publish("empty:channel", {"msg": 1}) == 0
|
||||
|
||||
|
||||
class TestBlackboardConfig:
    """Configuration defaults and overrides."""

    def test_default_config(self):
        defaults = BlackboardConfig()
        assert defaults.enabled is True
        assert defaults.redis_url == "redis://localhost:6379/0"
        assert defaults.keyspace_prefix == "timmy"
        assert defaults.ttl_seconds == 3600
        assert defaults.fallback_to_memory is True

    def test_custom_config(self):
        custom = BlackboardConfig(
            enabled=False,
            redis_url="redis://192.168.1.10:6379/1",
            keyspace_prefix="myagent",
            ttl_seconds=1800,
            fallback_to_memory=False,
        )
        # Every field should round-trip exactly as given.
        assert custom.enabled is False
        assert custom.redis_url == "redis://192.168.1.10:6379/1"
        assert custom.keyspace_prefix == "myagent"
        assert custom.ttl_seconds == 1800
        assert custom.fallback_to_memory is False
|
||||
|
||||
|
||||
class TestKeyspacePrefix:
    """Test that keys are correctly prefixed on write and stripped on read."""

    def test_prefixed_keys(self):
        bb = Blackboard(BlackboardConfig(keyspace_prefix="myagent", fallback_to_memory=True))
        bb.set("thought", "test")
        # Internally the key is stored as "myagent:thought", but
        # Blackboard.keys() strips the prefix before returning — so the
        # original assertion (`"myagent:thought" in k`) could never pass.
        keys = bb.keys("*")
        assert keys == ["thought"]
        # The un-prefixed key resolves through get(), proving the prefix
        # was applied consistently on both write and read.
        assert bb.get("thought") == "test"
|
||||
|
||||
|
||||
class TestBlackboardIntegration:
    """End-to-end pattern: one agent's thought cycle feeding another."""

    def test_agent_thought_cycle(self):
        """Simulate Timmy writing a thought and Ezra reading it."""
        board = Blackboard(BlackboardConfig(fallback_to_memory=True, enabled=True))

        # Agent A publishes an observation; agent B reads it back.
        board.set("agent:timmy:observation", "Gitea queue has 12 open issues")
        assert board.get("agent:timmy:observation") == "Gitea queue has 12 open issues"

        # Agent B records its own analysis.
        board.set("agent:ezra:analysis", "Prioritize critical bugs first")

        # Event-driven pattern: a plan broadcast reaches subscribers.
        plans = []

        def record_plan(channel, message):
            plans.append(message)

        board.subscribe("fleet:plan", record_plan)
        board.publish("fleet:plan", {"phase": "triaging", "lead": "ezra"})

        assert len(plans) == 1
        assert plans[0]["phase"] == "triaging"
|
||||
|
||||
|
||||
class TestTTL:
    """TTL configuration plumbing (enforcement is backend-dependent)."""

    def test_ttl_set_in_config(self):
        board = Blackboard(BlackboardConfig(ttl_seconds=60, fallback_to_memory=True))
        # The configured TTL is carried onto the instance and used by set().
        assert board.ttl == 60
        board.set("temp:key", "expiring value")
        # The memory backend accepts but ignores TTLs; the value persists.
        assert board.get("temp:key") == "expiring value"
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────
|
||||
# CLI smoke — can be called directly: python -m tests.test_blackboard
|
||||
# ─────────────────────────────────────────────
|
||||
|
||||
if __name__ == "__main__":
    import sys

    print("Running Blackboard smoke tests...")

    # Hand-picked subset that runs without Redis or pytest.
    smoke_tests = [
        TestBlackboardBasics().test_kv_memory_backend,
        TestBlackboardPubSub().test_pubsub_memory_backend,
        TestBlackboardConfig().test_default_config,
        TestBlackboardIntegration().test_agent_thought_cycle,
    ]

    failed = 0
    for case in smoke_tests:
        label = case.__name__
        try:
            case()
        except AssertionError as e:
            print(f" ✗ {label}: {e}")
            failed += 1
        except Exception as e:
            print(f" ✗ {label}: ERROR — {e}")
            failed += 1
        else:
            print(f" ✓ {label}")

    print(f"\nRan {len(smoke_tests)} tests, {failed} failures")
    # Non-zero exit status signals how many cases failed.
    sys.exit(failed)
|
||||
45
tests/test_cross_agent_quality_audit.py
Normal file
45
tests/test_cross_agent_quality_audit.py
Normal file
@@ -0,0 +1,45 @@
|
||||
"""Tests for cross_agent_quality_audit.py — #518."""
|
||||
|
||||
import pytest
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))
|
||||
|
||||
from cross_agent_quality_audit import AgentClassifier, hours_between
|
||||
|
||||
|
||||
class TestAgentClassifier:
    """Classification signals: title tag, branch name, then user login."""

    @staticmethod
    def _pr(title, ref, login):
        # Minimal PR payload shape expected by AgentClassifier.classify.
        return {"title": title, "head": {"ref": ref}, "user": {"login": login}}

    def test_title_tag_claude(self):
        pr = self._pr("[Claude] fix auth middleware", "fix/123", "rockachopa")
        assert AgentClassifier.classify(pr) == "claude"

    def test_title_tag_ezra(self):
        pr = self._pr("[Ezra] tmux fleet launcher", "burn/10", "rockachopa")
        assert AgentClassifier.classify(pr) == "ezra"

    def test_branch_name_claude(self):
        pr = self._pr("fix auth", "claude/issue-1695", "rockachopa")
        assert AgentClassifier.classify(pr) == "claude"

    def test_user_mapping(self):
        pr = self._pr("some fix", "fix/1", "claude")
        assert AgentClassifier.classify(pr) == "claude"

    def test_rockachopa_maps_to_burn_loop(self):
        pr = self._pr("some fix", "fix/1", "Rockachopa")
        assert AgentClassifier.classify(pr) == "burn-loop"

    def test_unknown_fallback(self):
        pr = self._pr("some fix", "fix/1", "random")
        assert AgentClassifier.classify(pr) == "unknown"
|
||||
|
||||
|
||||
class TestHoursBetween:
    """hours_between: elapsed-hour arithmetic and None propagation."""

    def test_same_day(self):
        assert hours_between("2026-04-22T10:00:00Z", "2026-04-22T12:00:00Z") == 2.0

    def test_none_returns_none(self):
        # Either endpoint missing yields None, regardless of side.
        for start, end in [
            (None, "2026-04-22T12:00:00Z"),
            ("2026-04-22T10:00:00Z", None),
        ]:
            assert hours_between(start, end) is None
|
||||
244
timmy-config/agent-quality-scorecard.md
Normal file
244
timmy-config/agent-quality-scorecard.md
Normal file
@@ -0,0 +1,244 @@
|
||||
# Cross-Agent Quality Scorecard
|
||||
|
||||
**Audited at:** 2026-04-22T06:17:43.574309+00:00
|
||||
**Repos audited:** timmy-home, hermes-agent, the-nexus, the-door, fleet-ops, burn-fleet, the-playground, compounding-intelligence, the-beacon, second-son-of-timmy, timmy-academy, timmy-config
|
||||
|
||||
## Per-Agent Summary
|
||||
|
||||
| Agent | Total PRs | Merged | Closed (unmerged) | Open | Merge Rate | Rejection Rate | Avg Hours to Merge | Avg Hours to Close |
|
||||
|---|---|---:|---:|---:|---:|---:|---:|---:|
|
||||
| burn-loop | 1733 | 346 | 1239 | 148 | 21.8% | 78.2% | 18.9 | 20.6 |
|
||||
| unknown | 843 | 598 | 214 | 31 | 73.6% | 26.4% | 2.3 | 11.3 |
|
||||
| claude | 264 | 138 | 121 | 5 | 53.3% | 46.7% | 3.3 | 6.2 |
|
||||
| gemini | 95 | 24 | 70 | 1 | 25.5% | 74.5% | 0.5 | 11.3 |
|
||||
| timmy | 28 | 15 | 11 | 2 | 57.7% | 42.3% | 9.8 | 20.2 |
|
||||
| bezalel | 21 | 11 | 9 | 1 | 55.0% | 45.0% | 2.7 | 8.0 |
|
||||
| allegro | 21 | 7 | 11 | 3 | 38.9% | 61.1% | 31.1 | 20.2 |
|
||||
| ezra | 8 | 2 | 3 | 3 | 40.0% | 60.0% | 4.4 | 16.8 |
|
||||
| kimi | 6 | 3 | 3 | 0 | 50.0% | 50.0% | 39.5 | 0.5 |
|
||||
| manus | 6 | 5 | 1 | 0 | 83.3% | 16.7% | 0.0 | 18.8 |
|
||||
| codex | 2 | 2 | 0 | 0 | 100.0% | 0.0% | 2.3 | — |
|
||||
|
||||
## Per-Repo Merge Rate
|
||||
|
||||
| Repo | Total PRs | Merged | Merge Rate |
|
||||
|---|---|---:|---:|
|
||||
| the-nexus | 985 | 501 | 51.0% |
|
||||
| hermes-agent | 519 | 128 | 25.0% |
|
||||
| timmy-config | 404 | 140 | 35.0% |
|
||||
| timmy-home | 270 | 104 | 39.0% |
|
||||
| fleet-ops | 266 | 84 | 32.0% |
|
||||
| the-beacon | 175 | 62 | 35.0% |
|
||||
| the-door | 153 | 31 | 20.0% |
|
||||
| second-son-of-timmy | 111 | 82 | 74.0% |
|
||||
| compounding-intelligence | 50 | 9 | 18.0% |
|
||||
| the-playground | 44 | 2 | 5.0% |
|
||||
| burn-fleet | 38 | 2 | 5.0% |
|
||||
| timmy-academy | 12 | 6 | 50.0% |
|
||||
|
||||
## Methodology
|
||||
|
||||
- **Agent classification** uses three signals in priority order:
|
||||
1. Explicit title tag (e.g. `[Claude]`, `[Ezra]`)
|
||||
2. Branch name containing agent name (e.g. `claude/issue-123`)
|
||||
3. Git user (`claude` → claude, `Rockachopa` → burn-loop)
|
||||
- **Merge rate** = merged / (merged + closed_unmerged). Open PRs are excluded.
|
||||
- **Rejection rate** = closed_unmerged / (merged + closed_unmerged).
|
||||
- **Time metrics** are computed from created_at to merged_at / closed_at.
|
||||
|
||||
## Raw Data
|
||||
|
||||
```json
|
||||
{
|
||||
"burn-loop": {
|
||||
"total_prs": 1733,
|
||||
"merged": 346,
|
||||
"closed_unmerged": 1239,
|
||||
"open": 148,
|
||||
"merge_rate": 0.218,
|
||||
"rejection_rate": 0.782,
|
||||
"avg_hours_to_merge": 18.9,
|
||||
"avg_hours_to_close": 20.6,
|
||||
"repos": [
|
||||
"burn-fleet",
|
||||
"compounding-intelligence",
|
||||
"fleet-ops",
|
||||
"hermes-agent",
|
||||
"second-son-of-timmy",
|
||||
"the-beacon",
|
||||
"the-door",
|
||||
"the-nexus",
|
||||
"the-playground",
|
||||
"timmy-academy",
|
||||
"timmy-config",
|
||||
"timmy-home"
|
||||
]
|
||||
},
|
||||
"unknown": {
|
||||
"total_prs": 843,
|
||||
"merged": 598,
|
||||
"closed_unmerged": 214,
|
||||
"open": 31,
|
||||
"merge_rate": 0.736,
|
||||
"rejection_rate": 0.264,
|
||||
"avg_hours_to_merge": 2.3,
|
||||
"avg_hours_to_close": 11.3,
|
||||
"repos": [
|
||||
"fleet-ops",
|
||||
"hermes-agent",
|
||||
"second-son-of-timmy",
|
||||
"the-beacon",
|
||||
"the-door",
|
||||
"the-nexus",
|
||||
"timmy-academy",
|
||||
"timmy-config",
|
||||
"timmy-home"
|
||||
]
|
||||
},
|
||||
"claude": {
|
||||
"total_prs": 264,
|
||||
"merged": 138,
|
||||
"closed_unmerged": 121,
|
||||
"open": 5,
|
||||
"merge_rate": 0.533,
|
||||
"rejection_rate": 0.467,
|
||||
"avg_hours_to_merge": 3.3,
|
||||
"avg_hours_to_close": 6.2,
|
||||
"repos": [
|
||||
"hermes-agent",
|
||||
"the-nexus",
|
||||
"timmy-config",
|
||||
"timmy-home"
|
||||
]
|
||||
},
|
||||
"gemini": {
|
||||
"total_prs": 95,
|
||||
"merged": 24,
|
||||
"closed_unmerged": 70,
|
||||
"open": 1,
|
||||
"merge_rate": 0.255,
|
||||
"rejection_rate": 0.745,
|
||||
"avg_hours_to_merge": 0.5,
|
||||
"avg_hours_to_close": 11.3,
|
||||
"repos": [
|
||||
"hermes-agent",
|
||||
"the-nexus",
|
||||
"timmy-config",
|
||||
"timmy-home"
|
||||
]
|
||||
},
|
||||
"timmy": {
|
||||
"total_prs": 28,
|
||||
"merged": 15,
|
||||
"closed_unmerged": 11,
|
||||
"open": 2,
|
||||
"merge_rate": 0.577,
|
||||
"rejection_rate": 0.423,
|
||||
"avg_hours_to_merge": 9.8,
|
||||
"avg_hours_to_close": 20.2,
|
||||
"repos": [
|
||||
"burn-fleet",
|
||||
"hermes-agent",
|
||||
"the-nexus",
|
||||
"timmy-config",
|
||||
"timmy-home"
|
||||
]
|
||||
},
|
||||
"bezalel": {
|
||||
"total_prs": 21,
|
||||
"merged": 11,
|
||||
"closed_unmerged": 9,
|
||||
"open": 1,
|
||||
"merge_rate": 0.55,
|
||||
"rejection_rate": 0.45,
|
||||
"avg_hours_to_merge": 2.7,
|
||||
"avg_hours_to_close": 8.0,
|
||||
"repos": [
|
||||
"burn-fleet",
|
||||
"hermes-agent",
|
||||
"the-beacon",
|
||||
"the-nexus",
|
||||
"timmy-config",
|
||||
"timmy-home"
|
||||
]
|
||||
},
|
||||
"allegro": {
|
||||
"total_prs": 21,
|
||||
"merged": 7,
|
||||
"closed_unmerged": 11,
|
||||
"open": 3,
|
||||
"merge_rate": 0.389,
|
||||
"rejection_rate": 0.611,
|
||||
"avg_hours_to_merge": 31.1,
|
||||
"avg_hours_to_close": 20.2,
|
||||
"repos": [
|
||||
"burn-fleet",
|
||||
"hermes-agent",
|
||||
"the-beacon",
|
||||
"the-nexus",
|
||||
"timmy-config",
|
||||
"timmy-home"
|
||||
]
|
||||
},
|
||||
"ezra": {
|
||||
"total_prs": 8,
|
||||
"merged": 2,
|
||||
"closed_unmerged": 3,
|
||||
"open": 3,
|
||||
"merge_rate": 0.4,
|
||||
"rejection_rate": 0.6,
|
||||
"avg_hours_to_merge": 4.4,
|
||||
"avg_hours_to_close": 16.8,
|
||||
"repos": [
|
||||
"burn-fleet",
|
||||
"fleet-ops",
|
||||
"timmy-config",
|
||||
"timmy-home"
|
||||
]
|
||||
},
|
||||
"kimi": {
|
||||
"total_prs": 6,
|
||||
"merged": 3,
|
||||
"closed_unmerged": 3,
|
||||
"open": 0,
|
||||
"merge_rate": 0.5,
|
||||
"rejection_rate": 0.5,
|
||||
"avg_hours_to_merge": 39.5,
|
||||
"avg_hours_to_close": 0.5,
|
||||
"repos": [
|
||||
"hermes-agent",
|
||||
"the-nexus",
|
||||
"timmy-home"
|
||||
]
|
||||
},
|
||||
"manus": {
|
||||
"total_prs": 6,
|
||||
"merged": 5,
|
||||
"closed_unmerged": 1,
|
||||
"open": 0,
|
||||
"merge_rate": 0.833,
|
||||
"rejection_rate": 0.167,
|
||||
"avg_hours_to_merge": 0.0,
|
||||
"avg_hours_to_close": 18.8,
|
||||
"repos": [
|
||||
"the-nexus",
|
||||
"timmy-config"
|
||||
]
|
||||
},
|
||||
"codex": {
|
||||
"total_prs": 2,
|
||||
"merged": 2,
|
||||
"closed_unmerged": 0,
|
||||
"open": 0,
|
||||
"merge_rate": 1.0,
|
||||
"rejection_rate": 0.0,
|
||||
"avg_hours_to_merge": 2.3,
|
||||
"avg_hours_to_close": null,
|
||||
"repos": [
|
||||
"timmy-config",
|
||||
"timmy-home"
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Reference in New Issue
Block a user