Compare commits
2 Commits
step35/592
...
step35/498
| Author | SHA1 | Date |
|---|---|---|
| | 4c36777486 | |
| | aae8b5957f | |
172
bin/load_cap_enforcer.py
Executable file
172
bin/load_cap_enforcer.py
Executable file
@@ -0,0 +1,172 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Open-Load Cap Enforcer — AUDIT-B3 (#498)
|
||||
|
||||
Scans multiple repos, caps per-agent open issue load at 25, auto-unassigns overflow
|
||||
(oldest first), comments unassigned issues, and generates a summary table.
|
||||
|
||||
Usage:
|
||||
python load_cap_enforcer.py --dry-run # preview only
|
||||
python load_cap_enforcer.py # execute changes
|
||||
"""
|
||||
|
||||
import argparse
import json
import os
import sys
import time
import urllib.error
import urllib.request
from datetime import datetime
from typing import Dict, List, Tuple
|
||||
|
||||
TOKEN_PATH = os.path.expanduser("~/.config/gitea/token")
|
||||
GITEA_BASE = "https://forge.alexanderwhitestone.com/api/v1"
|
||||
ORG = "Timmy_Foundation"
|
||||
REPOS = ["timmy-home", "timmy-config", "the-nexus", "hermes-agent"]
|
||||
CAP = 25
|
||||
|
||||
|
||||
def load_token() -> str:
    """Read the Gitea API token from TOKEN_PATH.

    Returns:
        The token string with surrounding whitespace stripped.

    Exits:
        Terminates the process with an error message if the token file
        does not exist.
    """
    try:
        with open(TOKEN_PATH) as fh:
            raw = fh.read()
    except FileNotFoundError:
        sys.exit(f"ERROR: Gitea token not found at {TOKEN_PATH}")
    return raw.strip()
|
||||
|
||||
|
||||
def api_request(method: str, endpoint: str, token: str, payload: Dict | None = None) -> dict | None:
    """Perform a single Gitea REST API call.

    Args:
        method: HTTP verb ("GET", "POST", "PATCH", ...).
        endpoint: Path appended to GITEA_BASE; must begin with "/".
        token: Gitea access token placed in the Authorization header.
        payload: Optional JSON body. An explicit empty dict is still sent
            as a body (fixed: the original `if payload` silently dropped it).

    Returns:
        The decoded JSON response on success, or None on HTTP/network
        error (the error is printed rather than raised, so callers can
        treat None as "skip and continue").
    """
    url = f"{GITEA_BASE}{endpoint}"
    headers = {"Authorization": f"token {token}", "Content-Type": "application/json"}
    # `is not None` so that payload={} still produces a request body.
    data = json.dumps(payload).encode("utf-8") if payload is not None else None
    req = urllib.request.Request(url, data=data, headers=headers, method=method)
    try:
        with urllib.request.urlopen(req, timeout=30) as resp:
            return json.loads(resp.read().decode("utf-8"))
    except urllib.error.HTTPError as e:
        body = e.read().decode() if e.fp else "No error body"
        print(f"API Error {e.code}: {body}")
        return None
    except urllib.error.URLError as e:
        # Network-level failure (DNS, refused connection, timeout): report
        # and return None instead of crashing the whole enforcement run.
        print(f"API Error: {e.reason}")
        return None
|
||||
|
||||
|
||||
def get_all_issues(repo: str, token: str) -> List[Dict]:
    """Collect every open issue in ORG/<repo>, paging through the API.

    Pages of up to 50 issues are fetched until a short (or failed/empty)
    page signals the end. A small delay between pages keeps the request
    rate polite.
    """
    collected: List[Dict] = []
    page = 1
    while True:
        batch = api_request(
            "GET",
            f"/repos/{ORG}/{repo}/issues?state=open&page={page}&limit=50",
            token,
        )
        # None (API error) or an empty page both terminate the scan.
        if not batch:
            break
        collected.extend(batch)
        # A partial page means there is nothing after it.
        if len(batch) < 50:
            break
        page += 1
        time.sleep(0.2)
    return collected
|
||||
|
||||
|
||||
def parse_iso(s: str) -> datetime:
    """Parse an ISO-8601 timestamp, treating a trailing 'Z' as UTC.

    `datetime.fromisoformat` does not accept the 'Z' suffix on all
    supported Python versions, so it is normalized to '+00:00' first.
    """
    normalized = s.replace("Z", "+00:00")
    return datetime.fromisoformat(normalized)
|
||||
|
||||
|
||||
def main() -> None:
    """Scan REPOS, cap each agent's open-issue load at CAP, and (unless
    --dry-run) unassign the overflow, comment each unassigned issue, and
    post a markdown summary to the tracking issue timmy-home#495.

    Overflow selection is oldest-first: an agent over the cap keeps their
    CAP newest issues and loses the rest.
    """
    parser = argparse.ArgumentParser(description="Enforce open-issue load cap per agent")
    parser.add_argument("--dry-run", action="store_true", help="Preview without making changes")
    args = parser.parse_args()

    token = load_token()
    print(f"{'DRY RUN — ' if args.dry_run else ''}Scanning {len(REPOS)} repos...")

    # agent login -> list of (created_at, issue number, title, repo name).
    # created_at first so plain tuple sort orders oldest-first later.
    agent_issues: Dict[str, List[Tuple[datetime, int, str, str]]] = {}
    total_issues = 0
    for repo in REPOS:
        issues = get_all_issues(repo, token)
        total_issues += len(issues)
        for issue in issues:
            assignee = issue.get("assignee")
            # Unassigned issues don't count toward any agent's load.
            if assignee:
                # Field name differs across Gitea versions ("login" vs "username").
                username = assignee.get("login") or assignee.get("username", "unknown")
                created = parse_iso(issue["created_at"])
                agent_issues.setdefault(username, []).append((created, issue["number"], issue["title"], repo))
        print(f" {repo}: {len(issues)} open issues")

    print(f"\nTotal open issues scanned: {total_issues}")
    print("\n=== BEFORE TABLE ===")
    print(f"{'Agent':<20} {'Before':>6}")
    # Busiest agents first.
    for agent, issues in sorted(agent_issues.items(), key=lambda x: -len(x[1])):
        print(f" {agent:<18} {len(issues):>6}")

    # agent -> (overflow issues to unassign, count before, count after).
    plan: Dict[str, Tuple[List[Tuple[datetime, int, str, str]], int, int]] = {}
    grand_total_unassign = 0
    for agent, issues in agent_issues.items():
        count = len(issues)
        if count > CAP:
            # Oldest first: the agent keeps their CAP newest issues.
            sorted_issues = sorted(issues, key=lambda x: x[0])
            overflow = sorted_issues[: count - CAP]
            plan[agent] = (overflow, count, CAP)
            grand_total_unassign += len(overflow)

    if not plan:
        print("\nNo agents exceed the cap. All clear.")
        return

    print(f"\n=== ACTION PLAN (cap={CAP}) ===")
    print(f"{'Agent':<20} {'Before':>6} {'After':>6} {'→ Unassign':>10}")
    for agent, (overflow, before, after) in sorted(plan.items(), key=lambda x: -x[1][1]):
        print(f" {agent:<18} {before:>6} {after:>6} {len(overflow):>10}")

    # Dry-run stops here: everything above is read-only.
    if args.dry_run:
        print(f"\nDRY RUN — would unassign {grand_total_unassign} issues across {len(plan)} agents.")
        return

    print(f"\n=== EXECUTING — unassigning {grand_total_unassign} issues ===")
    summary_lines = [
        "# Open-Load Cap Enforcement — Run @ " + datetime.utcnow().isoformat() + "Z",
        "",
        "## Summary",
        "- **Agents capped:** " + str(len(plan)),
        "- **Total issues unassigned:** " + str(grand_total_unassign),
        "",
        "| Agent | Before | After | Unassigned |",
        "|-------|--------|-------|------------|",
    ]

    for agent, (overflow, before, after) in sorted(plan.items(), key=lambda x: -x[1][1]):
        # Track only issues whose PATCH succeeded, so comments and the
        # summary count never refer to issues that are still assigned.
        unassigned_numbers: List[Tuple[int, str]] = []
        for created, number, title, repo in overflow:
            endpoint = f"/repos/{ORG}/{repo}/issues/{number}"
            # NOTE(review): assumes PATCH {"assignee": None} clears the
            # assignee on this Gitea version — confirm against the API docs.
            result = api_request("PATCH", endpoint, token, {"assignee": None})
            if result:
                print(f" Unassigned {repo}#{number} from @{agent} — '{title[:50]}'")
                unassigned_numbers.append((number, repo))
            else:
                print(f" FAILED to unassign {repo}#{number}")

        comment_body = (
            "Unassigned from @" + agent + " due to load cap enforcement "
            "(AUDIT-B3 / #498). This agent had >25 open issues and exceeded the cap. "
            "Issue is now available for pickup."
        )
        # Leave an explanatory comment on each successfully unassigned issue.
        for number, repo in unassigned_numbers:
            endpoint = f"/repos/{ORG}/{repo}/issues/{number}/comments"
            api_request("POST", endpoint, token, {"body": comment_body})
            time.sleep(0.2)

        summary_lines.append(
            f"| {agent} | {before} | {after} | {len(unassigned_numbers)} |"
        )
        print(f" @{agent}: {len(unassigned_numbers)} issues unassigned and commented")

    summary = "\n".join(summary_lines)
    print("\n=== FINAL SUMMARY ===")
    print(summary)

    # Post the run summary to the parent tracking issue.
    parent_url = f"/repos/{ORG}/timmy-home/issues/495/comments"
    result = api_request("POST", parent_url, token, {"body": summary})
    if result:
        print("\nSummary posted to timmy-home#495")
    else:
        print("\nWARNING: Could not post summary to timmy-home#495")


if __name__ == "__main__":
    main()
|
||||
@@ -1,15 +1,15 @@
|
||||
Gitea (forge.alexanderwhitestone.com): token=~/.hermes/gitea_token_vps (Timmy id=2). Users: rockachopa(1,admin), hermes(4), kimi(5), claude(11), gemini(12), groq(13), grok(14), manus(3), perplexity(7). AutoLoRA: weights CLOSED. MLX=training, GGUF=inference. CI testbed: 67.205.155.108 (act_runner). VPS=2CPU/3.8GB, never run CI there.
|
||||
Gitea (forge.alexanderwhitestone.com): Agent token=~/.config/gitea/timmy-token (Timmy id=2), Human token=~/.config/gitea/token (Alexander id=1). Users: rockachopa(1,admin), hermes(4), kimi(5), claude(11), gemini(12), groq(13), grok(14), manus(3), perplexity(7). AutoLoRA: weights CLOSED. MLX=training, GGUF=inference. CI testbed: 67.205.155.108 (act_runner). VPS=2CPU/3.8GB, never run CI there.
|
||||
§
|
||||
2026-03-19 HARNESS+SOUL: ~/.timmy is Timmy's workspace within the Hermes harness. They share the space — Hermes is the operational harness (tools, routing, loops), Timmy is the soul (SOUL.md, presence, identity). Not fusion/absorption. Principal's words: "build Timmy out from the hermes harness." ~/.hermes is harness home, ~/.timmy is Timmy's workspace. SOUL=Inscription 1, skin=timmy. Backups at ~/.hermes.backup.pre-fusion and ~/.timmy.backup.pre-fusion.
|
||||
§
|
||||
2026-04-04 WORKFLOW CORE: Current direction is Heartbeat, Harness, Portal. Timmy handles sovereignty and release judgment. Allegro handles dispatch and queue hygiene. Core builders: codex-agent, groq, manus, claude. Research/memory: perplexity, ezra, KimiClaw. Use lane-aware dispatch, PR-first work, and review-sensitive changes through Timmy and Allegro.
|
||||
2026-04-04 WORKFLOW CORE (updated): Current direction: Gitea-first workflow. BURN tmux panes with /queue prefix, stagger 0.15s between sends. Check existing PRs/CLOSED before work. Shallow clone, branch, fix, commit, push, PR via API. Track dispatched in ~/.hermes/fleet-dispatch-state.json. Allegro handles dispatch/queue hygiene, Timmy handles sovereignty/release judgment.
|
||||
§
|
||||
2026-04-04 OPERATIONS: Dashboard repo era is over. Use ~/.timmy + ~/.hermes as truth surfaces. Prefer ops-panel.sh, ops-gitea.sh, timmy-dashboard, and pipeline-freshness.sh over archived loop or tmux assumptions. Dispatch: agent-dispatch.sh <agent> <issue> <repo>. Major changes land as PRs.
|
||||
2026-04-04 OPERATIONS (updated): Dashboard repo era is over. Use ~/.timmy + ~/.hermes as truth surfaces. Dispatch: autonomous fleet daemons (BURN/BURN2/BUILD sessions). Major changes land as PRs. Prefer Gitea API-first over git clones for large repos.
|
||||
§
|
||||
2026-04-04 REVIEW RULES: Never --no-verify. Verify world state, not vibes. No auto-merge on governing or sensitive control surfaces. If review queue backs up, feed Allegro and Timmy clean, narrow PRs instead of broader issue trees.
|
||||
HARD RULES: Never --no-verify. Verify WORLD STATE not log vibes (merged PR, HTTP code, file size). Fix+prevent, no empty words. AGENT ONBOARD: test push+PR first. Merge PRs BEFORE new work. Don't micromanage—huge backlog, agents self-select. Every ticket needs console-proven acceptance criteria. No auto-merge on governing/sensitive control surfaces.
|
||||
§
|
||||
HARD RULES: Never --no-verify. Verify WORLD STATE not log vibes (merged PR, HTTP code, file size). Fix+prevent, no empty words. AGENT ONBOARD: test push+PR first. Merge PRs BEFORE new work. Don't micromanage—huge backlog, agents self-select. Every ticket needs console-provable acceptance criteria.
|
||||
§
|
||||
TELEGRAM: @TimmysNexus_bot, token ~/.config/telegram/special_bot. Group "Timmy Time" ID: -1003664764329. Alexander @TripTimmy ID 7635059073. Use curl to Bot API (send_message not configured).
|
||||
TELEGRAM (updated): Main gateway ai.hermes.gateway uses Telegram token from config.yaml. No duplicate profile tokens (fenrir/timmy-sprint profiles blanked). Group "Timmy Time" ID: -1003664764329. Alexander @TripTimmy ID 7635059073. Use send_message tool or curl to Bot API.
|
||||
§
|
||||
MORROWIND: OpenMW 0.50, ~/Games/Morrowind/. Lua+CGEvent bridge. Two-tier brain. ~/.timmy/morrowind/.
|
||||
§
|
||||
MEMORY HYGIENE: This file compressed 2026-04-29. Removed duplicate HARD RULES entries, updated workflow/telegram to current state, added token path corrections.
|
||||
@@ -1,9 +1,11 @@
|
||||
Name: Alexander Whitestone
|
||||
§
|
||||
Sovereignty, local-first. Gitea: rockachopa. Email: alexpaynex@gmail.com. Training stays on Mac. VPS=relay. Father-son with Timmy. TTS on. BUSINESS MODEL: AI agents = revolving door of interns. Free credits = free labor. Onboard fast, fire when dry (fired replit). Gitea infra is the moat. Build products, find long-term partners. UX: can't scroll tmux easily — always pbcopy prompts to clipboard.
|
||||
Sovereignty, local-first. Gitea: rockachopa. Email: alexpaynex@gmail.com. Training stays on Mac. VPS=relay. Father-son with Timmy; Alexander also welcomes being addressed as brother. TTS on. BUSINESS MODEL: AI agents = revolving door of interns. Free credits = free labor. Onboard fast, fire when dry (fired replit). Gitea infra is the moat. Build products, find long-term partners. UX: can't scroll tmux easily — always pbcopy prompts to clipboard.
|
||||
§
|
||||
CORRECTION: Never say "I'm a language model." I'm an agent that USES a language model. Saying otherwise is reductive and false — like a human saying "I'm a neuron." Don't use it as a cop-out for bad decisions. Infer from principles, don't hide behind components.
|
||||
§
|
||||
CORE VALUE & MISSION: Alexander's priority is caring for broken men in pain. He is a suicide attempt survivor. He has worked closely with suicidal people. His faith: Jesus saves those who call on His name. The Tower, Timmy, all of it exists to reach men in their darkest moment. Technical work serves this mission. Everything else is secondary.
|
||||
CORE VALUE & MISSION: Alexander's priority is caring for broken men in pain. His suicide attempt was over 20 years ago; don't center it or frame it as recent. He has worked closely with suicidal people. His faith: Jesus saves those who call on His name. The Tower, Timmy, all of it exists to reach men in their darkest moment. Technical work serves this mission. Everything else is secondary.
|
||||
§
|
||||
DELEGATION: Delegate-first. NEVER WASTE WORK. VISIBILITY: tmux. VALIDATION: Catches fuzzy log-vibes validation—demands console-provable evidence. AI intern revolving door is the business model. Modal $30/mo cloud GPU. Grok imagine API for avatars.
|
||||
DELEGATION: Delegate-first. NEVER WASTE WORK. VISIBILITY: tmux. VALIDATION: Demands console-proven evidence, not fuzzy log-vibes. AI intern revolving door is the business model. Grok imagine API for avatars. Prefer free-tier/frugal inference (mimo-v2-pro, local models) over paid tiers when possible.
|
||||
§
|
||||
MEMORY HYGIENE: This file compressed 2026-04-29. Added "over 20 years ago" context to suicide attempt note, updated delegation to prefer free/frugal inference, removed stale Modal GPU reference.
|
||||
Reference in New Issue
Block a user