Compare commits: step35/874 ... fix/518 (1 commit, 3f45cae90a)

README.md (70 changed lines)

@@ -112,76 +112,6 @@ pytest tests/
```

### Project Structure

## Sherlock Username Recon Wrapper

### Quick Usage

```bash
# Opt-in via env var
export SHERLOCK_ENABLED=1

# Or via explicit CLI flag
python -m tools.sherlock_wrapper --query "alice" --opt-in --json

# With site whitelist
python -m tools.sherlock_wrapper --query "alice" --opt-in --sites github twitter --json
```

### What It Does

Builds a bounded, local wrapper around the Sherlock username OSINT tool (a minimal programmatic sketch follows this list):

- **Opt-in gate** — `SHERLOCK_ENABLED=1` or `--opt-in` required before any external call
- **Local-first caching** — results cached in `~/.cache/timmy/sherlock_cache.db` (TTL: 7 days)
- **Normalized JSON** — stable schema with `found`, `missing`, `errors`, and `metadata` sections
- **No telemetry** — outbound HTTP goes only to the target sites via sherlock; the wrapper itself never phones home
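
A minimal sketch of programmatic use, assuming `tools/sherlock_wrapper.py` from this repo is importable and sherlock is installed:

```python
# Minimal sketch: satisfy the opt-in gate via opt_in=True instead of the env var.
from tools.sherlock_wrapper import run_sherlock

result = run_sherlock("alice", sites=["github", "twitter"], opt_in=True)
for hit in result["found"]:
    print(hit["site"], hit["url"])
```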

### Output Schema

```json
{
  "schema_version": "1.0",
  "query": "alice",
  "timestamp": "2025-04-26T14:23:00+00:00",
  "found": [
    {"site": "github", "url": "https://github.com/alice"}
  ],
  "missing": ["twitter", "facebook"],
  "errors": [{"site": "instagram", "error": "timeout"}],
  "metadata": {
    "total_sites_checked": 50,
    "found_count": 1,
    "missing_count": 48,
    "error_count": 1
  }
}
```
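
Because the schema is stable, downstream consumers can script against it directly; a small stdlib-only example (field names as above):

```bash
python -m tools.sherlock_wrapper --query "alice" --opt-in --json \
  | python -c "import json,sys; d=json.load(sys.stdin); print(d['metadata']['found_count'])"
```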

### Setup

Sherlock must be installed separately:

```bash
pip install sherlock-project
```

The wrapper itself is pure Python: apart from sherlock, it needs only the standard library.

### Why an Opt-In Gate?

Sherlock makes outbound HTTP requests to dozens of third-party sites. The opt-in gate (demonstrated below):

1. Ensures a human operator explicitly approves this dependency
2. Makes the outbound traffic auditable in session logs
3. Prevents accidental invocation in automated pipelines
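
Without either signal, the wrapper refuses before making any network call:

```bash
# Expected refusal (exit code 1) when neither signal is set:
unset SHERLOCK_ENABLED
python -m tools.sherlock_wrapper --query "alice" --json
# -> ERROR: Sherlock is opt-in only. Set SHERLOCK_ENABLED=1 or pass --opt-in.
```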

### Running the Smoke Test

```bash
# Run unit + integration tests
pytest tests/test_sherlock_wrapper.py -v
```

```
.
```

scripts/cross_agent_quality_audit.py (new file, 313 lines)

@@ -0,0 +1,313 @@

#!/usr/bin/env python3
"""
Cross-agent quality audit — #518

Fetches all PRs across Timmy_Foundation repos, classifies by agent,
and produces a merge-rate scorecard.

Usage:
    python scripts/cross_agent_quality_audit.py
    python scripts/cross_agent_quality_audit.py --scorecard timmy-config/agent-quality-scorecard.md
"""

import argparse
import json
import os
import re
import sys
from collections import defaultdict
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, List, Optional

import requests

GITEA_BASE = "https://forge.alexanderwhitestone.com/api/v1"
ORG = "Timmy_Foundation"
TOKEN = os.environ.get("GITEA_TOKEN") or (
    Path.home() / ".config" / "gitea" / "token"
).read_text().strip()

HEADERS = {"Authorization": f"token {TOKEN}"}

# Repos to audit (active code repos)
DEFAULT_REPOS = [
    "timmy-home",
    "hermes-agent",
    "the-nexus",
    "the-door",
    "fleet-ops",
    "burn-fleet",
    "the-playground",
    "compounding-intelligence",
    "the-beacon",
    "second-son-of-timmy",
    "timmy-academy",
    "timmy-config",
]


class AgentClassifier:
    """Classify PRs by agent identity."""

    # PR title prefixes that explicitly name an agent
    AGENT_TITLE_RE = re.compile(
        r"^\[(?P<agent>Claude|Ezra|Allegro|Bezalel|Timmy|Gemini|Kimi|Manus|Codex)\]",
        re.IGNORECASE,
    )

    # Branch patterns that embed agent names
    AGENT_BRANCH_RE = re.compile(
        r"(?P<agent>claude|ezra|allegro|bezalel|timmy|gemini|kimi|manus|codex)",
        re.IGNORECASE,
    )

    @classmethod
    def classify(cls, pr: Dict[str, Any]) -> str:
        title = pr.get("title", "")
        branch = pr.get("head", {}).get("ref", "")
        user = pr.get("user", {}).get("login", "")

        # 1. Explicit title tag like [Claude] or [Ezra]
        m = cls.AGENT_TITLE_RE.match(title)
        if m:
            return m.group("agent").lower()

        # 2. Branch contains agent name (e.g. claude/issue-123)
        m = cls.AGENT_BRANCH_RE.search(branch)
        if m:
            return m.group("agent").lower()

        # 3. Git user mapping
        if user.lower() == "claude":
            return "claude"
        if user.lower() == "rockachopa":
            # Rockachopa is the human / orchestrator — map to "burn-loop"
            return "burn-loop"

        return "unknown"


def fetch_prs(repo: str, state: str = "all", per_page: int = 50) -> List[Dict[str, Any]]:
    """Paginate through all PRs for a repo."""
    prs: List[Dict[str, Any]] = []
    page = 1
    while True:
        url = f"{GITEA_BASE}/repos/{ORG}/{repo}/pulls?state={state}&limit={per_page}&page={page}"
        resp = requests.get(url, headers=HEADERS, timeout=30)
        resp.raise_for_status()
        batch = resp.json()
        if not batch:
            break
        prs.extend(batch)
        if len(batch) < per_page:
            break
        page += 1
    return prs
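
# Note: the short-batch break above assumes per_page never exceeds the
# server's maximum page size (Gitea caps response items server-side);
# a capped, smaller-than-requested batch would otherwise end pagination early.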


def parse_datetime(dt_str: Optional[str]) -> Optional[datetime]:
    if not dt_str:
        return None
    try:
        return datetime.fromisoformat(dt_str.replace("Z", "+00:00"))
    except ValueError:
        return None


def hours_between(start: Optional[str], end: Optional[str]) -> Optional[float]:
    s = parse_datetime(start)
    e = parse_datetime(end)
    if s and e:
        return (e - s).total_seconds() / 3600
    return None


def audit_repos(repos: List[str]) -> Dict[str, Any]:
    """Run the audit and return aggregated stats."""
    agent_stats: Dict[str, Dict[str, Any]] = defaultdict(
        lambda: {
            "total": 0,
            "merged": 0,
            "closed_unmerged": 0,
            "open": 0,
            "hours_to_merge": [],
            "hours_to_close": [],
            "repos": set(),
            "prs": [],
        }
    )

    repo_stats: Dict[str, Dict[str, Any]] = {}

    for repo in repos:
        print(f"Fetching PRs for {repo} ...", file=sys.stderr)
        try:
            prs = fetch_prs(repo)
        except requests.HTTPError as exc:
            print(f" SKIP {repo}: {exc}", file=sys.stderr)
            continue

        repo_merged = 0
        repo_total = len(prs)
        for pr in prs:
            agent = AgentClassifier.classify(pr)
            s = agent_stats[agent]
            s["total"] += 1
            s["repos"].add(repo)
            s["prs"].append(
                {
                    "repo": repo,
                    "number": pr["number"],
                    "title": pr["title"],
                    "state": pr["state"],
                    "merged": pr.get("merged", False),
                    "created_at": pr.get("created_at"),
                    "merged_at": pr.get("merged_at"),
                    "closed_at": pr.get("closed_at"),
                }
            )

            if pr.get("merged"):
                s["merged"] += 1
                repo_merged += 1
                h = hours_between(pr.get("created_at"), pr.get("merged_at"))
                if h is not None:
                    s["hours_to_merge"].append(h)
            elif pr["state"] == "closed":
                s["closed_unmerged"] += 1
                h = hours_between(pr.get("created_at"), pr.get("closed_at"))
                if h is not None:
                    s["hours_to_close"].append(h)
            else:
                s["open"] += 1

        repo_stats[repo] = {
            "total": repo_total,
            "merged": repo_merged,
            "merge_rate": round(repo_merged / repo_total, 2) if repo_total else 0,
        }

    # Compute derived metrics
    summary = {}
    for agent, s in sorted(agent_stats.items(), key=lambda x: -x[1]["total"]):
        total = s["total"]
        merged = s["merged"]
        closed = s["closed_unmerged"]
        resolved = merged + closed
        merge_rate = round(merged / resolved, 3) if resolved else 0
        avg_merge_hours = (
            round(sum(s["hours_to_merge"]) / len(s["hours_to_merge"]), 1)
            if s["hours_to_merge"]
            else None
        )
        avg_close_hours = (
            round(sum(s["hours_to_close"]) / len(s["hours_to_close"]), 1)
            if s["hours_to_close"]
            else None
        )
        summary[agent] = {
            "total_prs": total,
            "merged": merged,
            "closed_unmerged": closed,
            "open": s["open"],
            "merge_rate": merge_rate,
            "rejection_rate": round(closed / resolved, 3) if resolved else 0,
            "avg_hours_to_merge": avg_merge_hours,
            "avg_hours_to_close": avg_close_hours,
            "repos": sorted(s["repos"]),
        }

    return {
        "audited_at": datetime.now(timezone.utc).isoformat(),
        "repos_audited": repos,
        "repo_stats": repo_stats,
        "agent_summary": summary,
        "raw_prs": {a: s["prs"] for a, s in agent_stats.items()},
    }


def render_scorecard(data: Dict[str, Any]) -> str:
    """Render a markdown scorecard."""
    lines = [
        "# Cross-Agent Quality Scorecard",
        "",
        f"**Audited at:** {data['audited_at']}",
        f"**Repos audited:** {', '.join(data['repos_audited'])}",
        "",
        "## Per-Agent Summary",
        "",
        "| Agent | Total PRs | Merged | Closed (unmerged) | Open | Merge Rate | Rejection Rate | Avg Hours to Merge | Avg Hours to Close |",
        "|---|---|---:|---:|---:|---:|---:|---:|---:|",
    ]

    for agent, s in data["agent_summary"].items():
        merge_hours = f"{s['avg_hours_to_merge']:.1f}" if s["avg_hours_to_merge"] is not None else "—"
        close_hours = f"{s['avg_hours_to_close']:.1f}" if s["avg_hours_to_close"] is not None else "—"
        lines.append(
            f"| {agent} | {s['total_prs']} | {s['merged']} | {s['closed_unmerged']} | "
            f"{s['open']} | {s['merge_rate']:.1%} | {s['rejection_rate']:.1%} | "
            f"{merge_hours} | {close_hours} |"
        )

    lines.extend([
        "",
        "## Per-Repo Merge Rate",
        "",
        "| Repo | Total PRs | Merged | Merge Rate |",
        "|---|---|---:|---:|",
    ])

    for repo, s in sorted(data["repo_stats"].items(), key=lambda x: -x[1]["total"]):
        lines.append(
            f"| {repo} | {s['total']} | {s['merged']} | {s['merge_rate']:.1%} |"
        )

    lines.extend([
        "",
        "## Methodology",
        "",
        "- **Agent classification** uses three signals in priority order:",
        " 1. Explicit title tag (e.g. `[Claude]`, `[Ezra]`)",
        " 2. Branch name containing agent name (e.g. `claude/issue-123`)",
        " 3. Git user (`claude` → claude, `Rockachopa` → burn-loop)",
        "- **Merge rate** = merged / (merged + closed_unmerged). Open PRs are excluded.",
        "- **Rejection rate** = closed_unmerged / (merged + closed_unmerged).",
        "- **Time metrics** are computed from created_at to merged_at / closed_at.",
        "",
        "## Raw Data",
        "",
        "```json",
        json.dumps(data["agent_summary"], indent=2),
        "```",
        "",
    ])

    return "\n".join(lines) + "\n"


def main() -> int:
    parser = argparse.ArgumentParser(description="Cross-agent quality audit")
    parser.add_argument("--repos", nargs="+", default=DEFAULT_REPOS, help="Repos to audit")
    parser.add_argument("--scorecard", default="timmy-config/agent-quality-scorecard.md", help="Output path")
    parser.add_argument("--json", default=None, help="Also write raw JSON to path")
    args = parser.parse_args()

    data = audit_repos(args.repos)

    scorecard_path = Path(args.scorecard)
    scorecard_path.parent.mkdir(parents=True, exist_ok=True)
    scorecard_path.write_text(render_scorecard(data))
    print(f"Scorecard written to {scorecard_path}", file=sys.stderr)

    if args.json:
        json_path = Path(args.json)
        json_path.parent.mkdir(parents=True, exist_ok=True)
        json_path.write_text(json.dumps(data, indent=2, default=str))
        print(f"Raw JSON written to {json_path}", file=sys.stderr)

    return 0


if __name__ == "__main__":
    raise SystemExit(main())
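
A typical invocation (default scorecard path from the argparse setup above; the `--json` output path here is just an example):

```bash
python scripts/cross_agent_quality_audit.py \
  --scorecard timmy-config/agent-quality-scorecard.md \
  --json /tmp/agent-audit.json
```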

tests/test_cross_agent_quality_audit.py (new file, 45 lines)

@@ -0,0 +1,45 @@

"""Tests for cross_agent_quality_audit.py — #518."""

import pytest
import sys
from pathlib import Path

sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))

from cross_agent_quality_audit import AgentClassifier, hours_between


class TestAgentClassifier:
    def test_title_tag_claude(self):
        pr = {"title": "[Claude] fix auth middleware", "head": {"ref": "fix/123"}, "user": {"login": "rockachopa"}}
        assert AgentClassifier.classify(pr) == "claude"

    def test_title_tag_ezra(self):
        pr = {"title": "[Ezra] tmux fleet launcher", "head": {"ref": "burn/10"}, "user": {"login": "rockachopa"}}
        assert AgentClassifier.classify(pr) == "ezra"

    def test_branch_name_claude(self):
        pr = {"title": "fix auth", "head": {"ref": "claude/issue-1695"}, "user": {"login": "rockachopa"}}
        assert AgentClassifier.classify(pr) == "claude"

    def test_user_mapping(self):
        pr = {"title": "some fix", "head": {"ref": "fix/1"}, "user": {"login": "claude"}}
        assert AgentClassifier.classify(pr) == "claude"

    def test_rockachopa_maps_to_burn_loop(self):
        pr = {"title": "some fix", "head": {"ref": "fix/1"}, "user": {"login": "Rockachopa"}}
        assert AgentClassifier.classify(pr) == "burn-loop"

    def test_unknown_fallback(self):
        pr = {"title": "some fix", "head": {"ref": "fix/1"}, "user": {"login": "random"}}
        assert AgentClassifier.classify(pr) == "unknown"


class TestHoursBetween:
    def test_same_day(self):
        h = hours_between("2026-04-22T10:00:00Z", "2026-04-22T12:00:00Z")
        assert h == 2.0

    def test_none_returns_none(self):
        assert hours_between(None, "2026-04-22T12:00:00Z") is None
        assert hours_between("2026-04-22T10:00:00Z", None) is None

tests/test_sherlock_wrapper.py (deleted file, 182 lines)

@@ -1,182 +0,0 @@

#!/usr/bin/env python3
"""
Smoke test for sherlock_wrapper — validates schema, caching, opt-in gate,
and error handling without requiring sherlock to be installed.
"""

import json
import os
import sys
import tempfile
import unittest
from pathlib import Path
from unittest.mock import patch, MagicMock

sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "tools"))

from sherlock_wrapper import (
    compute_query_hash,
    normalize_sherlock_output,
    require_opt_in,
    check_sherlock_available,
    get_cache_connection,
    save_to_cache,
    get_cached_result,
)


class TestSherlockWrapperSmoke(unittest.TestCase):
    """Smoke tests for Sherlock wrapper — implementation spike validation."""

    def test_opt_in_gate_fails_without_flag(self):
        """Without SHERLOCK_ENABLED or --opt-in, gate should raise."""
        with patch("sherlock_wrapper.SHERLOCK_ENABLED", False):
            with self.assertRaises(RuntimeError) as ctx:
                require_opt_in(opt_in=False)
            self.assertIn("opt-in only", str(ctx.exception).lower())

    def test_opt_in_gate_succeeds_with_env(self):
        """SHERLOCK_ENABLED=1 bypasses gate."""
        with patch("sherlock_wrapper.SHERLOCK_ENABLED", True):
            require_opt_in(opt_in=False)  # Should not raise

    def test_opt_in_gate_succeeds_with_flag(self):
        """--opt-in flag bypasses gate."""
        with patch("sherlock_wrapper.SHERLOCK_ENABLED", False):
            require_opt_in(opt_in=True)  # Should not raise

    def test_query_hash_deterministic(self):
        """Same input produces same hash."""
        h1 = compute_query_hash("alice")
        h2 = compute_query_hash("alice")
        self.assertEqual(h1, h2)

    def test_query_hash_site_sensitivity(self):
        """Different site lists produce different hashes."""
        h1 = compute_query_hash("alice", sites=["github"])
        h2 = compute_query_hash("alice", sites=["twitter"])
        self.assertNotEqual(h1, h2)

    def test_normalize_basic_found_missing(self):
        """Normalization produces correct schema."""
        raw = {
            "github": {"status": "found", "url": "https://github.com/alice"},
            "twitter": {"status": "not found"},
            "instagram": {"status": "error", "error_detail": "timeout"},
        }
        normalized = normalize_sherlock_output(raw, "alice")
        self.assertEqual(normalized["query"], "alice")
        self.assertEqual(normalized["metadata"]["found_count"], 1)
        self.assertEqual(normalized["metadata"]["missing_count"], 1)
        self.assertEqual(normalized["metadata"]["error_count"], 1)
        self.assertEqual(len(normalized["found"]), 1)
        self.assertEqual(normalized["found"][0]["site"], "github")
        self.assertIn("twitter", normalized["missing"])
        self.assertEqual(normalized["errors"][0]["site"], "instagram")

    def test_normalized_schema_has_required_fields(self):
        """Output schema contains all required top-level keys."""
        raw = {"site1": {"status": "not found"}}
        normalized = normalize_sherlock_output(raw, "testuser")
        required = ["schema_version", "query", "timestamp", "found", "missing",
                    "errors", "metadata"]
        for key in required:
            self.assertIn(key, normalized)
        self.assertIsInstance(normalized["timestamp"], str)
        self.assertIsInstance(normalized["found"], list)
        self.assertIsInstance(normalized["missing"], list)
        self.assertIsInstance(normalized["errors"], list)
        self.assertIsInstance(normalized["metadata"], dict)

    def test_cache_roundtrip(self):
        """Result can be written and read back from cache."""
        with tempfile.TemporaryDirectory() as tmp:
            with patch("sherlock_wrapper.CACHE_DB", Path(tmp) / "cache.db"):
                test_result = {
                    "schema_version": "1.0",
                    "query": "alice",
                    "timestamp": "2025-04-26T00:00:00+00:00",
                    "found": [],
                    "missing": ["github"],
                    "errors": [],
                    "metadata": {"total_sites_checked": 1, "found_count": 0, "missing_count": 1, "error_count": 0},
                }
                query_hash = compute_query_hash("alice")
                save_to_cache(query_hash, test_result)
                retrieved = get_cached_result(query_hash)
                self.assertEqual(retrieved, test_result)

    def test_cache_miss_on_stale(self):
        """Cache returns None when entry is older than 7 days."""
        with tempfile.TemporaryDirectory() as tmp:
            db_path = Path(tmp) / "cache.db"
            with patch("sherlock_wrapper.CACHE_DB", db_path):
                old_ts = "2025-04-01T00:00:00+00:00"
                old_result = {
                    "schema_version": "1.0", "query": "alice",
                    "timestamp": old_ts, "found": [], "missing": [], "errors": [],
                    "metadata": {"total_sites_checked": 0, "found_count": 0, "missing_count": 0, "error_count": 0},
                }
                query_hash = compute_query_hash("alice")
                # Direct DB insert with controlled timestamp (bypass save_to_cache's NOW)
                conn = get_cache_connection()
                conn.execute(
                    "INSERT INTO cache (query_hash, result_json, timestamp) VALUES (?, ?, ?)",
                    (query_hash, json.dumps(old_result), old_ts)
                )
                conn.commit()
                retrieved = get_cached_result(query_hash)
                self.assertIsNone(retrieved)

    def test_sherlock_available_check(self):
        """check_sherlock_available returns bool."""
        available = check_sherlock_available()
        self.assertIsInstance(available, bool)
        # Note: on this test system sherlock may not be installed, so False is expected.
        # The important thing is the function returns a bool.
        print(f"[INFO] Sherlock installed: {available}")


class TestSherlockWrapperIntegration(unittest.TestCase):
    """Integration tests with mocked sherlock module."""

    def test_run_sherlock_with_opt_in(self):
        """run_sherlock succeeds with opt-in and returns normalized result."""
        fake_sherlock = MagicMock()
        fake_sherlock.sherlock = MagicMock(return_value={
            "github": {"status": "found", "url": "https://github.com/alice"},
            "twitter": {"status": "not found"},
        })
        with patch.dict("sys.modules", {"sherlock": fake_sherlock}):
            import importlib
            import sherlock_wrapper
            importlib.reload(sherlock_wrapper)
            with patch.dict(os.environ, {"SHERLOCK_ENABLED": "1"}):
                from sherlock_wrapper import run_sherlock
                result = run_sherlock("alice", opt_in=True)
                self.assertEqual(result["query"], "alice")
                self.assertEqual(result["metadata"]["found_count"], 1)

    def test_run_sherlock_fails_without_opt_in(self):
        """run_sherlock raises RuntimeError without opt-in."""
        from sherlock_wrapper import run_sherlock
        with self.assertRaises(RuntimeError) as ctx:
            run_sherlock("alice", opt_in=False)
        self.assertIn("opt-in only", str(ctx.exception).lower())

    def test_run_sherlock_uses_cache(self):
        """Cached result short-circuits sherlock execution."""
        cached = {
            "schema_version": "1.0", "query": "alice", "timestamp": "2025-04-26T00:00:00+00:00",
            "found": [{"site": "github", "url": "https://github.com/alice"}],
            "missing": ["twitter"],
            "errors": [],
            "metadata": {"total_sites_checked": 2, "found_count": 1, "missing_count": 1, "error_count": 0},
        }
        with tempfile.TemporaryDirectory() as tmp:
            with patch("sherlock_wrapper.CACHE_DB", Path(tmp) / "cache.db"):
                query_hash = compute_query_hash("alice")
                save_to_cache(query_hash, cached)
                from sherlock_wrapper import run_sherlock
                result = run_sherlock("alice", opt_in=True)
                self.assertEqual(result, cached)

timmy-config/agent-quality-scorecard.md (new file, 244 lines)

@@ -0,0 +1,244 @@

# Cross-Agent Quality Scorecard

**Audited at:** 2026-04-22T06:17:43.574309+00:00
**Repos audited:** timmy-home, hermes-agent, the-nexus, the-door, fleet-ops, burn-fleet, the-playground, compounding-intelligence, the-beacon, second-son-of-timmy, timmy-academy, timmy-config

## Per-Agent Summary

| Agent | Total PRs | Merged | Closed (unmerged) | Open | Merge Rate | Rejection Rate | Avg Hours to Merge | Avg Hours to Close |
|---|---|---:|---:|---:|---:|---:|---:|---:|
| burn-loop | 1733 | 346 | 1239 | 148 | 21.8% | 78.2% | 18.9 | 20.6 |
| unknown | 843 | 598 | 214 | 31 | 73.6% | 26.4% | 2.3 | 11.3 |
| claude | 264 | 138 | 121 | 5 | 53.3% | 46.7% | 3.3 | 6.2 |
| gemini | 95 | 24 | 70 | 1 | 25.5% | 74.5% | 0.5 | 11.3 |
| timmy | 28 | 15 | 11 | 2 | 57.7% | 42.3% | 9.8 | 20.2 |
| bezalel | 21 | 11 | 9 | 1 | 55.0% | 45.0% | 2.7 | 8.0 |
| allegro | 21 | 7 | 11 | 3 | 38.9% | 61.1% | 31.1 | 20.2 |
| ezra | 8 | 2 | 3 | 3 | 40.0% | 60.0% | 4.4 | 16.8 |
| kimi | 6 | 3 | 3 | 0 | 50.0% | 50.0% | 39.5 | 0.5 |
| manus | 6 | 5 | 1 | 0 | 83.3% | 16.7% | 0.0 | 18.8 |
| codex | 2 | 2 | 0 | 0 | 100.0% | 0.0% | 2.3 | — |

## Per-Repo Merge Rate

| Repo | Total PRs | Merged | Merge Rate |
|---|---|---:|---:|
| the-nexus | 985 | 501 | 51.0% |
| hermes-agent | 519 | 128 | 25.0% |
| timmy-config | 404 | 140 | 35.0% |
| timmy-home | 270 | 104 | 39.0% |
| fleet-ops | 266 | 84 | 32.0% |
| the-beacon | 175 | 62 | 35.0% |
| the-door | 153 | 31 | 20.0% |
| second-son-of-timmy | 111 | 82 | 74.0% |
| compounding-intelligence | 50 | 9 | 18.0% |
| the-playground | 44 | 2 | 5.0% |
| burn-fleet | 38 | 2 | 5.0% |
| timmy-academy | 12 | 6 | 50.0% |

## Methodology

- **Agent classification** uses three signals in priority order:
 1. Explicit title tag (e.g. `[Claude]`, `[Ezra]`)
 2. Branch name containing agent name (e.g. `claude/issue-123`)
 3. Git user (`claude` → claude, `Rockachopa` → burn-loop)
- **Merge rate** = merged / (merged + closed_unmerged). Open PRs are excluded.
- **Rejection rate** = closed_unmerged / (merged + closed_unmerged).
- **Time metrics** are computed from created_at to merged_at / closed_at.
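
For example, from the Per-Agent table above: burn-loop has 346 merged and 1239 closed-unmerged PRs, so its merge rate is 346 / (346 + 1239) = 346 / 1585 ≈ 0.218, reported as 21.8%; the 148 open PRs do not enter the ratio.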

## Raw Data

```json
{
  "burn-loop": {
    "total_prs": 1733,
    "merged": 346,
    "closed_unmerged": 1239,
    "open": 148,
    "merge_rate": 0.218,
    "rejection_rate": 0.782,
    "avg_hours_to_merge": 18.9,
    "avg_hours_to_close": 20.6,
    "repos": [
      "burn-fleet",
      "compounding-intelligence",
      "fleet-ops",
      "hermes-agent",
      "second-son-of-timmy",
      "the-beacon",
      "the-door",
      "the-nexus",
      "the-playground",
      "timmy-academy",
      "timmy-config",
      "timmy-home"
    ]
  },
  "unknown": {
    "total_prs": 843,
    "merged": 598,
    "closed_unmerged": 214,
    "open": 31,
    "merge_rate": 0.736,
    "rejection_rate": 0.264,
    "avg_hours_to_merge": 2.3,
    "avg_hours_to_close": 11.3,
    "repos": [
      "fleet-ops",
      "hermes-agent",
      "second-son-of-timmy",
      "the-beacon",
      "the-door",
      "the-nexus",
      "timmy-academy",
      "timmy-config",
      "timmy-home"
    ]
  },
  "claude": {
    "total_prs": 264,
    "merged": 138,
    "closed_unmerged": 121,
    "open": 5,
    "merge_rate": 0.533,
    "rejection_rate": 0.467,
    "avg_hours_to_merge": 3.3,
    "avg_hours_to_close": 6.2,
    "repos": [
      "hermes-agent",
      "the-nexus",
      "timmy-config",
      "timmy-home"
    ]
  },
  "gemini": {
    "total_prs": 95,
    "merged": 24,
    "closed_unmerged": 70,
    "open": 1,
    "merge_rate": 0.255,
    "rejection_rate": 0.745,
    "avg_hours_to_merge": 0.5,
    "avg_hours_to_close": 11.3,
    "repos": [
      "hermes-agent",
      "the-nexus",
      "timmy-config",
      "timmy-home"
    ]
  },
  "timmy": {
    "total_prs": 28,
    "merged": 15,
    "closed_unmerged": 11,
    "open": 2,
    "merge_rate": 0.577,
    "rejection_rate": 0.423,
    "avg_hours_to_merge": 9.8,
    "avg_hours_to_close": 20.2,
    "repos": [
      "burn-fleet",
      "hermes-agent",
      "the-nexus",
      "timmy-config",
      "timmy-home"
    ]
  },
  "bezalel": {
    "total_prs": 21,
    "merged": 11,
    "closed_unmerged": 9,
    "open": 1,
    "merge_rate": 0.55,
    "rejection_rate": 0.45,
    "avg_hours_to_merge": 2.7,
    "avg_hours_to_close": 8.0,
    "repos": [
      "burn-fleet",
      "hermes-agent",
      "the-beacon",
      "the-nexus",
      "timmy-config",
      "timmy-home"
    ]
  },
  "allegro": {
    "total_prs": 21,
    "merged": 7,
    "closed_unmerged": 11,
    "open": 3,
    "merge_rate": 0.389,
    "rejection_rate": 0.611,
    "avg_hours_to_merge": 31.1,
    "avg_hours_to_close": 20.2,
    "repos": [
      "burn-fleet",
      "hermes-agent",
      "the-beacon",
      "the-nexus",
      "timmy-config",
      "timmy-home"
    ]
  },
  "ezra": {
    "total_prs": 8,
    "merged": 2,
    "closed_unmerged": 3,
    "open": 3,
    "merge_rate": 0.4,
    "rejection_rate": 0.6,
    "avg_hours_to_merge": 4.4,
    "avg_hours_to_close": 16.8,
    "repos": [
      "burn-fleet",
      "fleet-ops",
      "timmy-config",
      "timmy-home"
    ]
  },
  "kimi": {
    "total_prs": 6,
    "merged": 3,
    "closed_unmerged": 3,
    "open": 0,
    "merge_rate": 0.5,
    "rejection_rate": 0.5,
    "avg_hours_to_merge": 39.5,
    "avg_hours_to_close": 0.5,
    "repos": [
      "hermes-agent",
      "the-nexus",
      "timmy-home"
    ]
  },
  "manus": {
    "total_prs": 6,
    "merged": 5,
    "closed_unmerged": 1,
    "open": 0,
    "merge_rate": 0.833,
    "rejection_rate": 0.167,
    "avg_hours_to_merge": 0.0,
    "avg_hours_to_close": 18.8,
    "repos": [
      "the-nexus",
      "timmy-config"
    ]
  },
  "codex": {
    "total_prs": 2,
    "merged": 2,
    "closed_unmerged": 0,
    "open": 0,
    "merge_rate": 1.0,
    "rejection_rate": 0.0,
    "avg_hours_to_merge": 2.3,
    "avg_hours_to_close": null,
    "repos": [
      "timmy-config",
      "timmy-home"
    ]
  }
}
```

tools/sherlock_wrapper.py (deleted file, 249 lines)

@@ -1,249 +0,0 @@
#!/usr/bin/env python3
"""
Sherlock username recon wrapper — opt-in, cached, normalized JSON output.

This is an implementation spike (issue #874) to validate local integration
of the Sherlock OSINT tool without violating sovereignty/provenance standards.
"""

import argparse
import hashlib
import json
import os
import sqlite3
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional, Dict, Any, List

# Opt-in gate: must have SHERLOCK_ENABLED=1 or --opt-in flag
SHERLOCK_ENABLED = os.environ.get("SHERLOCK_ENABLED", "0") == "1"

# Cache location
CACHE_DIR = Path.home() / ".cache" / "timmy"
CACHE_DB = CACHE_DIR / "sherlock_cache.db"

# Normalized output schema version
SCHEMA_VERSION = "1.0"


def require_opt_in(opt_in: bool = False) -> None:
    """Enforce opt-in gate for Sherlock external dependency."""
    if not (SHERLOCK_ENABLED or opt_in):
        raise RuntimeError(
            "Sherlock is opt-in only. Set SHERLOCK_ENABLED=1 or pass --opt-in."
        )


def check_sherlock_available() -> bool:
    """Check if the sherlock Python package is installed."""
    try:
        import sherlock  # type: ignore # noqa: F401
        return True
    except ImportError:
        return False


def get_cache_connection() -> sqlite3.Connection:
    """Initialize cache directory and return DB connection."""
    CACHE_DIR.mkdir(parents=True, exist_ok=True)
    conn = sqlite3.connect(str(CACHE_DB))
    conn.execute("""
        CREATE TABLE IF NOT EXISTS cache (
            query_hash TEXT PRIMARY KEY,
            result_json TEXT NOT NULL,
            timestamp DATETIME NOT NULL
        )
    """)
    return conn


def compute_query_hash(username: str, sites: Optional[List[str]] = None) -> str:
    """Deterministic hash for cache key."""
    components = [username.lower().strip()]
    if sites:
        components.extend(sorted(sites))
    raw = "|".join(components)
    return hashlib.sha256(raw.encode()).hexdigest()
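
# Example: compute_query_hash("Alice ") == compute_query_hash("alice"), since
# the username is lowercased and stripped; passing a different sites list
# changes the key (sites are sorted before hashing, so their order does not).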


def get_cached_result(query_hash: str) -> Optional[Dict[str, Any]]:
    """Retrieve cached result if available and not stale (TTL: 7 days)."""
    conn = get_cache_connection()
    cur = conn.execute(
        "SELECT result_json, timestamp FROM cache WHERE query_hash = ?",
        (query_hash,)
    )
    row = cur.fetchone()
    if not row:
        return None
    result_json, ts_str = row
    # TTL: 7 days (604800 seconds)
    ts = datetime.fromisoformat(ts_str)
    age_seconds = (datetime.now(timezone.utc) - ts).total_seconds()
    if age_seconds >= 604800:
        return None
    return json.loads(result_json)


def save_to_cache(query_hash: str, result: Dict[str, Any]) -> None:
    """Persist result to cache."""
    conn = get_cache_connection()
    conn.execute(
        "INSERT OR REPLACE INTO cache (query_hash, result_json, timestamp) VALUES (?, ?, ?)",
        (query_hash, json.dumps(result), datetime.now(timezone.utc).isoformat())
    )
    conn.commit()
    conn.close()


def normalize_sherlock_output(
    raw_result: Dict[str, Any],
    username: str,
    sites_checked: Optional[List[str]] = None
) -> Dict[str, Any]:
    """
    Convert raw sherlock output into a stable, normalized schema.

    Expected sherlock result shape (via Python API):
    {
        "site_name": {"url": "...", "status": "found"|"not found"|"error", ...},
        ...
    }
    """
    found: List[Dict[str, str]] = []
    missing: List[str] = []
    errors: List[Dict[str, str]] = []

    for site_name, site_data in raw_result.items():
        status = site_data.get("status", "")
        url = site_data.get("url", "")
        if status == "found" and url:
            found.append({"site": site_name, "url": url})
        elif status == "not found":
            missing.append(site_name)
        else:
            errors.append({"site": site_name, "error": status or "unknown"})

    # Compute totals from the original site list if provided
    total_sites = len(raw_result) if sites_checked is None else len(sites_checked)

    return {
        "schema_version": SCHEMA_VERSION,
        "query": username,
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "found": found,
        "missing": missing,
        "errors": errors,
        "metadata": {
            "total_sites_checked": total_sites,
            "found_count": len(found),
            "missing_count": len(missing),
            "error_count": len(errors),
        },
    }


def run_sherlock(
    username: str,
    sites: Optional[List[str]] = None,
    timeout: Optional[int] = None,
    opt_in: bool = False,
    use_cache: bool = True
) -> Dict[str, Any]:
    """
    Execute Sherlock wrapper with opt-in gate, caching, and normalization.
    """
    require_opt_in(opt_in)

    # Compute cache key
    query_hash = compute_query_hash(username, sites)

    # Check cache first — avoids dependency requirement on cache hit.
    # Skipped when use_cache is False (wired to --no-cache in main).
    if use_cache:
        cached = get_cached_result(query_hash)
        if cached is not None:
            return cached

    # Only require sherlock on cache miss
    if not check_sherlock_available():
        raise RuntimeError(
            "Sherlock Python package not installed. "
            "Install with: pip install sherlock-project"
        )

    # Call sherlock
    try:
        from sherlock import sherlock as sherlock_main  # type: ignore

        if sites:
            result = sherlock_main(username, site_list=sites, timeout=timeout or 10)
        else:
            result = sherlock_main(username, timeout=timeout or 10)

        normalized = normalize_sherlock_output(result, username, sites)
        save_to_cache(query_hash, normalized)
        return normalized

    except Exception as e:
        raise RuntimeError(f"Sherlock execution failed: {e}") from e


def main() -> int:
    parser = argparse.ArgumentParser(
        description="Sherlock username OSINT wrapper — opt-in, cached, normalized JSON"
    )
    parser.add_argument(
        "--query", "-q", required=True,
        help="Username to search across sites"
    )
    parser.add_argument(
        "--opt-in", action="store_true",
        help="Explicit opt-in flag (alternatively set SHERLOCK_ENABLED=1)"
    )
    parser.add_argument(
        "--sites", "-s", nargs="+",
        help="Specific sites to check (default: all supported)"
    )
    parser.add_argument(
        "--timeout", "-t", type=int, default=10,
        help="Request timeout per site (default: 10)"
    )
    parser.add_argument(
        "--json", action="store_true",
        help="Output normalized JSON to stdout"
    )
    parser.add_argument(
        "--no-cache",
        action="store_true",
        help="Bypass cached result (if any)"
    )

    args = parser.parse_args()

    try:
        result = run_sherlock(
            username=args.query,
            sites=args.sites,
            timeout=args.timeout,
            opt_in=args.opt_in,
            use_cache=not args.no_cache
        )
        if args.json:
            print(json.dumps(result, indent=2))
        else:
            print(f"Query: {result['query']}")
            print(f"Found: {result['metadata']['found_count']} site(s)")
            print(f"Missing: {result['metadata']['missing_count']} site(s)")
            print(f"Errors: {result['metadata']['error_count']} site(s)")
            for f in result['found']:
                print(f"  [{f['site']}] {f['url']}")
        return 0
    except RuntimeError as e:
        print(f"ERROR: {e}", file=sys.stderr)
        return 1


if __name__ == "__main__":
    sys.exit(main())