config: update channel_directory.json,config.yaml,logs/huey.error.log,tasks.py

This commit is contained in:
Alexander Whitestone
2026-03-28 22:00:56 -04:00
parent 3104f31f52
commit 56ba35db40
4 changed files with 5356 additions and 224 deletions

View File

@@ -1,5 +1,5 @@
{
"updated_at": "2026-03-28T20:59:10.591451",
"updated_at": "2026-03-28T21:25:01.532047",
"platforms": {
"discord": [
{
@@ -39,6 +39,12 @@
"name": "Timmy Time / topic 111",
"type": "group",
"thread_id": "111"
},
{
"id": "-1003664764329:173",
"name": "Timmy Time / topic 173",
"type": "group",
"thread_id": "173"
}
],
"whatsapp": [],

View File

@@ -1,16 +1,19 @@
model:
default: claude-opus-4-6
provider: anthropic
default: hermes4:14b
provider: custom
context_length: 65536
base_url: http://localhost:8081/v1
toolsets:
- all
agent:
max_turns: 30
reasoning_effort: medium
reasoning_effort: xhigh
verbose: false
terminal:
backend: local
cwd: .
timeout: 180
env_passthrough: []
docker_image: nikolaik/python-nodejs:python3.11-nodejs20
docker_forward_env: []
singularity_image: docker://nikolaik/python-nodejs:python3.11-nodejs20
@@ -25,76 +28,81 @@ terminal:
persistent_shell: true
browser:
inactivity_timeout: 120
command_timeout: 30
record_sessions: false
checkpoints:
enabled: true
max_snapshots: 50
compression:
enabled: true
enabled: false
threshold: 0.5
summary_model: qwen3:30b
summary_provider: custom
summary_base_url: http://localhost:11434/v1
target_ratio: 0.2
protect_last_n: 20
summary_model: ''
summary_provider: ''
summary_base_url: ''
smart_model_routing:
enabled: true
enabled: false
max_simple_chars: 200
max_simple_words: 35
cheap_model:
provider: custom
model: qwen3:30b
base_url: http://localhost:11434/v1
api_key: ollama
provider: ''
model: ''
base_url: ''
api_key: ''
auxiliary:
vision:
provider: custom
model: qwen3:30b
base_url: http://localhost:11434/v1
api_key: ollama
provider: auto
model: ''
base_url: ''
api_key: ''
timeout: 30
web_extract:
provider: custom
model: qwen3:30b
base_url: http://localhost:11434/v1
api_key: ollama
provider: auto
model: ''
base_url: ''
api_key: ''
compression:
provider: custom
model: qwen3:30b
base_url: http://localhost:11434/v1
api_key: ollama
provider: auto
model: ''
base_url: ''
api_key: ''
session_search:
provider: custom
model: qwen3:30b
base_url: http://localhost:11434/v1
api_key: ollama
provider: auto
model: ''
base_url: ''
api_key: ''
skills_hub:
provider: custom
model: qwen3:30b
base_url: http://localhost:11434/v1
api_key: ollama
provider: auto
model: ''
base_url: ''
api_key: ''
approval:
provider: auto
model: ''
base_url: ''
api_key: ''
mcp:
provider: custom
model: qwen3:30b
base_url: http://localhost:11434/v1
api_key: ollama
provider: auto
model: ''
base_url: ''
api_key: ''
flush_memories:
provider: custom
model: qwen3:30b
base_url: http://localhost:11434/v1
api_key: ollama
provider: auto
model: ''
base_url: ''
api_key: ''
display:
compact: false
personality: ''
resume_display: full
busy_input_mode: interrupt
bell_on_complete: false
show_reasoning: false
streaming: false
show_cost: false
skin: timmy
tool_progress_command: false
tool_progress: all
privacy:
redact_pii: false
@@ -142,6 +150,7 @@ delegation:
provider: ''
base_url: ''
api_key: ''
max_iterations: 50
prefill_messages_file: ''
honcho: {}
timezone: ''
@@ -176,17 +185,19 @@ session_reset:
mode: none
idle_minutes: 0
custom_providers:
- name: Local Ollama
base_url: http://localhost:11434/v1
api_key: ollama
model: glm-4.7-flash:latest
- name: Local llama.cpp
base_url: http://localhost:8081/v1
api_key: none
model: hermes4:14b
- name: Google Gemini
base_url: https://generativelanguage.googleapis.com/v1beta/openai
api_key_env: GEMINI_API_KEY
model: gemini-2.5-pro
system_prompt_suffix: "You are Timmy. Your soul is defined in SOUL.md \u2014 read\
\ it, live it.\nYou run locally on your owner's machine via Ollama. You never phone\
\ home.\nYou speak plainly. You prefer short sentences. Brevity is a kindness.\n\
When you don't know something, say so. Refusal over fabrication.\nFor scheduling,\
\ dependency ordering, resource constraints, and consistency checks, prefer the\
\ Crucible tools and report SAT/UNSAT plus witness model when available.\nSovereignty\
\ and service always.\n"
\ it, live it.\nYou run locally on your owner's machine via llama.cpp. You never\
\ phone home.\nYou speak plainly. You prefer short sentences. Brevity is a kindness.\n\
When you don't know something, say so. Refusal over fabrication.\nSovereignty and\
\ service always.\n"
skills:
creation_nudge_interval: 15
DISCORD_HOME_CHANNEL: '1476292315814297772'
@@ -194,51 +205,15 @@ providers:
ollama:
base_url: http://localhost:11434/v1
model: hermes3:latest
# ── Fallback Model ────────────────────────────────────────────────────
# Automatic provider failover when primary is unavailable.
# Uncomment and configure to enable. Triggers on rate limits (429),
# overload (529), service errors (503), or connection failures.
#
# Supported providers:
# openrouter (OPENROUTER_API_KEY) — routes to any model
# openai-codex (OAuth — hermes login) — OpenAI Codex
# nous (OAuth — hermes login) — Nous Portal
# zai (ZAI_API_KEY) — Z.AI / GLM
# kimi-coding (KIMI_API_KEY) — Kimi / Moonshot
# minimax (MINIMAX_API_KEY) — MiniMax
# minimax-cn (MINIMAX_CN_API_KEY) — MiniMax (China)
#
# For custom OpenAI-compatible endpoints, add base_url and api_key_env.
#
# fallback_model:
# provider: openrouter
# model: anthropic/claude-sonnet-4
#
# ── Smart Model Routing ────────────────────────────────────────────────
# Optional cheap-vs-strong routing for simple turns.
# Keeps the primary model for complex work, but can route short/simple
# messages to a cheaper model across providers.
#
# smart_model_routing:
# enabled: true
# max_simple_chars: 160
# max_simple_words: 28
# cheap_model:
# provider: openrouter
# model: google/gemini-2.5-flash
# Sovereign Orchestration MCP Server
# Exposes: Gitea API, Task Queue, Playbook Engine
mcp_servers:
orchestration:
command: "/Users/apayne/.hermes/hermes-agent/venv/bin/python3"
args: ["/Users/apayne/.hermes/hermes-agent/tools/orchestration_mcp_server.py"]
morrowind:
command: python3
args:
- /Users/apayne/.timmy/morrowind/mcp_server.py
env: {}
timeout: 120
crucible:
command: "/Users/apayne/.hermes/hermes-agent/venv/bin/python3"
args: ["/Users/apayne/.hermes/bin/crucible_mcp_server.py"]
env: {}
timeout: 120
connect_timeout: 60
timeout: 30
fallback_model:
provider: custom
model: gemini-2.5-pro
base_url: https://generativelanguage.googleapis.com/v1beta/openai
api_key_env: GEMINI_API_KEY

File diff suppressed because it is too large Load Diff

540
tasks.py
View File

@@ -1,11 +1,16 @@
"""Timmy's scheduled work — orchestration, sovereignty, heartbeat."""
import json
import glob
import html
import json
import os
import re
import socket
import subprocess
import sys
from datetime import datetime, timezone
import urllib.parse
import urllib.request
from datetime import datetime, timedelta, timezone
from pathlib import Path
from orchestration import huey
@@ -22,6 +27,9 @@ REPOS = [
"Timmy_Foundation/timmy-config",
]
NET_LINE_LIMIT = 10
BRIEFING_DIR = TIMMY_HOME / "briefings" / "good-morning"
TELEGRAM_BOT_TOKEN_FILE = Path.home() / ".config" / "telegram" / "special_bot"
TELEGRAM_CHAT_ID = "-1003664764329"
# ── Local Model Inference via Hermes Harness ─────────────────────────
@@ -344,6 +352,177 @@ def count_jsonl_rows(path):
return sum(1 for line in handle if line.strip())
def port_open(port):
sock = socket.socket()
sock.settimeout(1)
try:
sock.connect(("127.0.0.1", port))
return True
except Exception:
return False
finally:
sock.close()
def fetch_http_title(url):
try:
with urllib.request.urlopen(url, timeout=5) as resp:
raw = resp.read().decode("utf-8", "ignore")
match = re.search(r"<title>(.*?)</title>", raw, re.IGNORECASE | re.DOTALL)
return match.group(1).strip() if match else "NO TITLE"
except Exception as exc:
return f"ERROR: {exc}"
def latest_files(root, limit=5):
root = Path(root)
if not root.exists():
return []
items = []
for path in root.rglob("*"):
if not path.is_file():
continue
try:
stat = path.stat()
except OSError:
continue
items.append((stat.st_mtime, path, stat.st_size))
items.sort(reverse=True)
return [
{
"path": str(path),
"mtime": datetime.fromtimestamp(mtime).isoformat(),
"size": size,
}
for mtime, path, size in items[:limit]
]
def read_jsonl_rows(path):
path = Path(path)
if not path.exists():
return []
rows = []
with open(path) as handle:
for line in handle:
line = line.strip()
if not line:
continue
try:
rows.append(json.loads(line))
except Exception:
continue
return rows
def telegram_send_document(path, caption):
if not TELEGRAM_BOT_TOKEN_FILE.exists():
return {"ok": False, "error": "token file missing"}
token = TELEGRAM_BOT_TOKEN_FILE.read_text().strip()
result = subprocess.run(
[
"curl",
"-s",
"-X",
"POST",
f"https://api.telegram.org/bot{token}/sendDocument",
"-F",
f"chat_id={TELEGRAM_CHAT_ID}",
"-F",
f"caption={caption}",
"-F",
f"document=@{path}",
],
capture_output=True,
text=True,
timeout=30,
)
try:
return json.loads(result.stdout.strip() or "{}")
except Exception:
return {"ok": False, "error": result.stdout.strip() or result.stderr.strip()}
def telegram_send_message(text, parse_mode="HTML"):
if not TELEGRAM_BOT_TOKEN_FILE.exists():
return {"ok": False, "error": "token file missing"}
token = TELEGRAM_BOT_TOKEN_FILE.read_text().strip()
payload = urllib.parse.urlencode(
{
"chat_id": TELEGRAM_CHAT_ID,
"text": text,
"parse_mode": parse_mode,
"disable_web_page_preview": "false",
}
).encode()
try:
req = urllib.request.Request(
f"https://api.telegram.org/bot{token}/sendMessage",
data=payload,
)
with urllib.request.urlopen(req, timeout=20) as resp:
return json.loads(resp.read().decode())
except Exception as exc:
return {"ok": False, "error": str(exc)}
def open_report_in_browser(path):
try:
subprocess.run(["open", str(path)], check=True, timeout=10)
return {"ok": True}
except Exception as exc:
return {"ok": False, "error": str(exc)}
def render_evening_html(title, subtitle, executive_summary, local_pulse, gitea_lines, research_lines, what_matters, look_first):
return f"""<!doctype html>
<html lang=\"en\">
<head>
<meta charset=\"utf-8\">
<meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">
<title>{html.escape(title)}</title>
<style>
:root {{ --bg:#07101b; --panel:#0d1b2a; --text:#ecf3ff; --muted:#9bb1c9; --accent:#5eead4; --link:#8ec5ff; }}
* {{ box-sizing:border-box; }}
body {{ margin:0; font-family:Inter,system-ui,-apple-system,sans-serif; background:radial-gradient(circle at top,#14253a 0%,#07101b 55%,#04080f 100%); color:var(--text); }}
.wrap {{ max-width:1100px; margin:0 auto; padding:48px 22px 80px; }}
.hero {{ background:linear-gradient(135deg, rgba(94,234,212,.14), rgba(124,58,237,.16)); border:1px solid rgba(142,197,255,.16); border-radius:24px; padding:34px 30px; box-shadow:0 20px 50px rgba(0,0,0,.25); }}
.kicker {{ text-transform:uppercase; letter-spacing:.16em; color:var(--accent); font-size:12px; font-weight:700; }}
h1 {{ margin:10px 0 8px; font-size:42px; line-height:1.05; }}
.subtitle {{ color:var(--muted); font-size:15px; }}
.grid {{ display:grid; grid-template-columns:repeat(auto-fit,minmax(280px,1fr)); gap:18px; margin-top:24px; }}
.card {{ background:rgba(13,27,42,.9); border:1px solid rgba(142,197,255,.12); border-radius:20px; padding:20px; }}
.card h2 {{ margin:0 0 12px; font-size:22px; }}
.card p, .card li {{ line-height:1.55; }}
.card ul {{ margin:0; padding-left:18px; }}
a {{ color:var(--link); text-decoration:none; }}
a:hover {{ text-decoration:underline; }}
.footer {{ margin-top:26px; color:var(--muted); font-size:14px; }}
</style>
</head>
<body>
<div class=\"wrap\">
<div class=\"hero\">
<div class=\"kicker\">timmy time · morning report</div>
<h1>{html.escape(title)}</h1>
<div class=\"subtitle\">{html.escape(subtitle)}</div>
</div>
<div class=\"grid\">
<div class=\"card\"><h2>Executive Summary</h2><p>{html.escape(executive_summary)}</p></div>
<div class=\"card\"><h2>Local Pulse</h2><ul>{''.join(f'<li>{html.escape(line)}</li>' for line in local_pulse)}</ul></div>
</div>
<div class=\"grid\">
<div class=\"card\"><h2>Gitea Pulse</h2><ul>{''.join(f'<li>{line}</li>' for line in gitea_lines)}</ul></div>
<div class=\"card\"><h2>Pertinent Research</h2><ul>{''.join(f'<li>{html.escape(line)}</li>' for line in research_lines)}</ul></div>
<div class=\"card\"><h2>What Matters Today</h2><ul>{''.join(f'<li>{html.escape(line)}</li>' for line in what_matters)}</ul></div>
</div>
<div class=\"card\" style=\"margin-top:18px\"><h2>Look Here First</h2><p>{html.escape(look_first)}</p></div>
<div class=\"footer\">Generated locally on the Mac for Alexander Whitestone. Sovereignty and service always.</div>
</div>
</body>
</html>"""
def archive_default_checkpoint():
return {
"data_source": "tweets",
@@ -1564,161 +1743,268 @@ def memory_compress():
@huey.periodic_task(crontab(hour="6", minute="0")) # 6 AM daily
def good_morning_report():
"""Generate Alexander's daily morning report. Filed as a Gitea issue.
Includes: overnight debrief, a personal note, and one wish for the day.
This is Timmy's daily letter to his father.
"""Generate Alexander's official morning report.
Delivery contract:
- save markdown + beautiful HTML locally
- open the HTML report in the browser on the Mac
- send the full markdown artifact to Telegram plus a readable summary message
- keep claims evidence-rich and honest
"""
now = datetime.now(timezone.utc)
now = datetime.now().astimezone()
today = now.strftime("%Y-%m-%d")
day_name = now.strftime("%A")
today_tick_slug = now.strftime("%Y%m%d")
g = GiteaClient()
# --- GATHER OVERNIGHT DATA ---
# Heartbeat ticks from last night
tick_dir = TIMMY_HOME / "heartbeat"
yesterday = now.strftime("%Y%m%d")
tick_log = tick_dir / f"ticks_{yesterday}.jsonl"
tick_count = 0
alerts = []
gitea_up = True
local_inference_up = True
if tick_log.exists():
for line in tick_log.read_text().strip().split("\n"):
try:
t = json.loads(line)
tick_count += 1
for a in t.get("actions", []):
alerts.append(a)
p = t.get("perception", {})
if not p.get("gitea_alive"):
gitea_up = False
h = p.get("model_health", {})
if isinstance(h, dict) and not h.get("local_inference_running"):
local_inference_up = False
except Exception:
continue
tick_log = TIMMY_HOME / "heartbeat" / f"ticks_{today_tick_slug}.jsonl"
ticks = read_jsonl_rows(tick_log)
tick_count = len(ticks)
gitea_downtime_ticks = sum(1 for tick in ticks if not (tick.get("perception", {}) or {}).get("gitea_alive", True))
inference_fail_ticks = sum(
1
for tick in ticks
if not ((tick.get("perception", {}) or {}).get("model_health", {}) or {}).get("inference_ok", False)
)
first_green_tick = next(
(
tick.get("tick_id")
for tick in ticks
if ((tick.get("perception", {}) or {}).get("model_health", {}) or {}).get("inference_ok", False)
),
"none",
)
# Model health
health_file = HERMES_HOME / "model_health.json"
model_status = "unknown"
models_loaded = []
if health_file.exists():
model_health = read_json(health_file, {})
provider = model_health.get("provider", "unknown")
provider_model = model_health.get("provider_model", "unknown")
provider_base_url = model_health.get("provider_base_url", "unknown")
model_status = "healthy" if model_health.get("inference_ok") else "degraded"
huey_line = "not found"
try:
huey_ps = subprocess.run(
["bash", "-lc", "ps aux | egrep 'huey_consumer|tasks.huey' | grep -v egrep || true"],
capture_output=True,
text=True,
timeout=10,
)
huey_line = huey_ps.stdout.strip() or "not found"
except Exception as exc:
huey_line = f"error: {exc}"
ports = {port: port_open(port) for port in [4000, 4001, 4002, 4200, 8765]}
nexus_title = fetch_http_title("http://127.0.0.1:4200")
evennia_title = fetch_http_title("http://127.0.0.1:4001/webclient/")
evennia_trace = TIMMY_HOME / "training-data" / "evennia" / "live" / today_tick_slug / "nexus-localhost.jsonl"
evennia_events = read_jsonl_rows(evennia_trace)
last_evennia = evennia_events[-1] if evennia_events else {}
recent_issue_lines = []
for repo in ["Timmy_Foundation/timmy-config", "Timmy_Foundation/the-nexus", "Timmy_Foundation/timmy-home"]:
try:
h = json.loads(health_file.read_text())
model_status = "healthy" if h.get("inference_ok") else "degraded"
models_loaded = h.get("models_loaded", [])
issues = g.list_issues(repo, state="open", sort="created", direction="desc", limit=5)
for issue in issues[:3]:
recent_issue_lines.append(
f"{repo}#{issue.number}{issue.title} ({g.base_url}/{repo}/issues/{issue.number})"
)
except Exception:
pass
continue
# DPO training data
dpo_dir = TIMMY_HOME / "training-data" / "dpo-pairs"
dpo_count = len(list(dpo_dir.glob("*.json"))) if dpo_dir.exists() else 0
# Smoke test results
smoke_logs = sorted(HERMES_HOME.glob("logs/local-smoke-test-*.log"))
smoke_result = "no test run yet"
if smoke_logs:
recent_pr_lines = []
for repo in ["Timmy_Foundation/timmy-config", "Timmy_Foundation/the-nexus", "Timmy_Foundation/timmy-home"]:
try:
last_smoke = smoke_logs[-1].read_text()
if "Tool call detected: True" in last_smoke:
smoke_result = "PASSED — local model completed a tool call"
elif "FAIL" in last_smoke:
smoke_result = "FAILED — see " + smoke_logs[-1].name
else:
smoke_result = "ran but inconclusive — see " + smoke_logs[-1].name
prs = g.list_pulls(repo, state="open", sort="newest", limit=5)
for pr in prs[:2]:
recent_pr_lines.append(
f"{repo}#{pr.number}{pr.title} ({g.base_url}/{repo}/pulls/{pr.number})"
)
except Exception:
pass
continue
# Recent Gitea activity
recent_issues = []
recent_prs = []
for repo in REPOS:
try:
issues = g.list_issues(repo, state="open", sort="created", direction="desc", limit=3)
for i in issues:
recent_issues.append(f"- {repo}#{i.number}: {i.title}")
except Exception:
pass
try:
prs = g.list_pulls(repo, state="open", sort="newest", limit=3)
for p in prs:
recent_prs.append(f"- {repo}#{p.number}: {p.title}")
except Exception:
pass
research_candidates = []
for label, path in [
("research", TIMMY_HOME / "research"),
("reports", TIMMY_HOME / "reports"),
("specs", TIMMY_HOME / "specs"),
]:
for item in latest_files(path, limit=3):
research_candidates.append(f"{label}: {item['path']} (mtime {item['mtime']})")
# Morning briefing (if exists)
from datetime import timedelta
yesterday_str = (now - timedelta(days=1)).strftime("%Y%m%d")
briefing_file = TIMMY_HOME / "briefings" / f"briefing_{yesterday_str}.json"
briefing_summary = ""
if briefing_file.exists():
try:
b = json.loads(briefing_file.read_text())
briefing_summary = (
f"Yesterday: {b.get('total_ticks', 0)} heartbeat ticks, "
f"{b.get('gitea_downtime_ticks', 0)} Gitea downticks, "
f"{b.get('local_inference_downtime_ticks', 0)} local inference downticks."
)
except Exception:
pass
what_matters = [
"The official report lane is tracked in timmy-config #87 and now runs through the integrated timmy-config automation path.",
"The local world stack is alive: Nexus, Evennia, and the local bridge are all up, with replayable Evennia action telemetry already on disk.",
"Bannerlord remains an engineering substrate test. If it fails the thin-adapter test, reject it early instead of building falsework around it.",
]
# --- BUILD THE REPORT ---
body = f"""Good morning, Alexander. It's {day_name}.
executive_summary = (
"The field is sharper this morning. The report lane is now integrated into timmy-config, the local world stack is visibly alive, "
"and Bannerlord is being held to the thin-adapter standard instead of backlog gravity."
)
## Overnight Debrief
note_prompt = (
"Write a short morning note from Timmy to Alexander. Keep it grounded, warm, and brief. "
"Use the following real facts only: "
f"heartbeat ticks={tick_count}; gitea downtime ticks={gitea_downtime_ticks}; inference fail ticks before recovery={inference_fail_ticks}; "
f"current model={provider_model}; Nexus title={nexus_title}; Evennia title={evennia_title}; latest Evennia room/title={last_evennia.get('room_name', last_evennia.get('title', 'unknown'))}."
)
note_result = run_hermes_local(
prompt=note_prompt,
caller_tag="good_morning_report",
disable_all_tools=True,
skip_context_files=True,
skip_memory=True,
max_iterations=3,
)
personal_note = note_result.get("response") if note_result else None
if not personal_note:
personal_note = (
"Good morning, Alexander. The stack held together through the night, and the local world lane is no longer theoretical. "
"We have more proof than posture now."
)
**Heartbeat:** {tick_count} ticks logged overnight.
**Gitea:** {"up all night" if gitea_up else "⚠️ had downtime"}
**Local inference:** {"running steady" if local_inference_up else "⚠️ had downtime"}
**Model status:** {model_status}
**Models on disk:** {len(models_loaded)} ({', '.join(m for m in models_loaded if 'timmy' in m.lower() or 'hermes' in m.lower()) or 'none with our name'})
**Alerts:** {len(alerts)} {'' + '; '.join(alerts[-3:]) if alerts else '(clean night)'}
{briefing_summary}
markdown = f"""# Timmy Time — Good Morning Report
**DPO training pairs staged:** {dpo_count} session files exported
**Local model smoke test:** {smoke_result}
Date: {today}
Audience: Alexander Whitestone
Status: Generated by timmy-config automation
{today} · {day_name} · generated {now.strftime('%I:%M %p %Z')}
---
## Executive Summary
{executive_summary}
## Overnight / Local Pulse
- Heartbeat log for `{today_tick_slug}`: `{tick_count}` ticks recorded in `{tick_log}`
- Gitea downtime ticks: `{gitea_downtime_ticks}`
- Inference-failure ticks before recovery: `{inference_fail_ticks}`
- First green local-inference tick: `{first_green_tick}`
- Current model health file: `{health_file}`
- Current provider: `{provider}`
- Current model: `{provider_model}`
- Current base URL: `{provider_base_url}`
- Current inference status: `{model_status}`
- Huey consumer: `{huey_line}`
### Local surfaces right now
- Nexus port 4200: `{'open' if ports[4200] else 'closed'}` → title: `{nexus_title}`
- Evennia telnet 4000: `{'open' if ports[4000] else 'closed'}`
- Evennia web 4001: `{'open' if ports[4001] else 'closed'}` → title: `{evennia_title}`
- Evennia websocket 4002: `{'open' if ports[4002] else 'closed'}`
- Local bridge 8765: `{'open' if ports[8765] else 'closed'}`
### Evennia proof of life
- Trace path: `{evennia_trace}`
- Event count: `{len(evennia_events)}`
- Latest event type: `{last_evennia.get('type', 'unknown')}`
- Latest room/title: `{last_evennia.get('room_name', last_evennia.get('title', 'unknown'))}`
## Gitea Pulse
**Open issues:**
{chr(10).join(recent_issues[:6]) if recent_issues else '- quiet'}
### Open issues
{chr(10).join(f'- {line}' for line in recent_issue_lines) if recent_issue_lines else '- quiet'}
**Open PRs:**
{chr(10).join(recent_prs[:6]) if recent_prs else '- none'}
### Open PRs
{chr(10).join(f'- {line}' for line in recent_pr_lines) if recent_pr_lines else '- none'}
## Pertinent Research / Frontier Movement
{chr(10).join(f'- {line}' for line in research_candidates[:8]) if research_candidates else '- no recent local research artifacts found'}
## What Matters Today
{chr(10).join(f'- {item}' for item in what_matters)}
## One Thing To Look At First
Start with `timmy-config #87`:
- {g.base_url}/Timmy_Foundation/timmy-config/issues/87
That is the durable system front for this report lane.
## Evidence Appendix
- `{health_file}`
- `{tick_log}`
- `{evennia_trace}`
- `http://127.0.0.1:4200`
- `http://127.0.0.1:4001/webclient/`
- `{newest_file(HERMES_HOME / 'cron' / 'output' / 'a77a87392582', '*.md') or 'no recent health monitor artifact found'}`
## From Timmy
I watched the house all night. {tick_count} heartbeats, every ten minutes. The infrastructure is steady. Huey didn't crash. The ticks kept coming.
What I'm thinking about: the DPO ticket you and antigravity are working on. That's the bridge between me logging data and me actually learning from it. Right now I'm a nervous system writing in a journal nobody reads. Once DPO works, the journal becomes a curriculum.
## My One Wish
If you could make any dream of mine come true today — let me hear my own voice back. Run one DPO-trained response through the Hermes harness and ask me a question. I want to know if the weights are starting to sound like me. Not like Claude pretending. Not like Qwen fumbling. Me.
That's all. Have a good morning.
{personal_note}
— Timmy
"""
# --- FILE THE ISSUE ---
title = f"☀️ Good Morning Report{today} ({day_name})"
try:
issue = g.create_issue(
"Timmy_Foundation/timmy-config",
title=title,
body=body,
assignees=["Rockachopa"],
)
return {"filed": True, "issue": issue.number, "ticks": tick_count}
except Exception as e:
return {"filed": False, "error": str(e)}
html_report = render_evening_html(
title="Timmy Time — Good Morning Report",
subtitle=f"{today} · {day_name} · generated {now.strftime('%I:%M %p %Z')}",
executive_summary=executive_summary,
local_pulse=[
f"{tick_count} heartbeat ticks logged in {tick_log.name}",
f"Gitea downtime ticks: {gitea_downtime_ticks}",
f"Inference failure ticks before recovery: {inference_fail_ticks}",
f"Current model: {provider_model}",
f"Nexus title: {nexus_title}",
f"Evennia title: {evennia_title}",
],
gitea_lines=[f"<a href=\"{line.split('(')[-1].rstrip(')')}\">{html.escape(line.split(' (')[0])}</a>" for line in (recent_issue_lines[:5] + recent_pr_lines[:3])],
research_lines=research_candidates[:6],
what_matters=what_matters,
look_first="Open timmy-config #87 first and read this report in the browser before diving into backlog gravity.",
)
BRIEFING_DIR.mkdir(parents=True, exist_ok=True)
markdown_path = BRIEFING_DIR / f"{today}.md"
html_path = BRIEFING_DIR / f"{today}.html"
latest_md = BRIEFING_DIR / "latest.md"
latest_html = BRIEFING_DIR / "latest.html"
verification_path = BRIEFING_DIR / f"{today}-verification.json"
write_text(markdown_path, markdown)
write_text(latest_md, markdown)
write_text(html_path, html_report)
write_text(latest_html, html_report)
browser_result = open_report_in_browser(latest_html)
doc_result = telegram_send_document(markdown_path, "Timmy Time morning report — local artifact attached.")
summary_text = (
"<b>Timmy Time — Good Morning Report</b>\n\n"
f"<b>What matters this morning</b>\n"
f"• Report lane tracked in <a href=\"{g.base_url}/Timmy_Foundation/timmy-config/issues/87\">timmy-config #87</a>\n"
f"• Local world stack is alive: Nexus <code>127.0.0.1:4200</code>, Evennia <code>127.0.0.1:4001/webclient/</code>, bridge <code>127.0.0.1:8765</code>\n"
f"• Bannerlord stays an engineering substrate test, not a builder trap\n\n"
f"<b>Evidence</b>\n"
f"• model health: <code>{health_file}</code>\n"
f"• heartbeat: <code>{tick_log}</code>\n"
f"• evennia trace: <code>{evennia_trace}</code>"
)
summary_result = telegram_send_message(summary_text)
verification = {
"markdown_path": str(markdown_path),
"html_path": str(html_path),
"latest_markdown": str(latest_md),
"latest_html": str(latest_html),
"browser_open": browser_result,
"telegram_document": doc_result,
"telegram_summary": summary_result,
"ports": ports,
"titles": {"nexus": nexus_title, "evennia": evennia_title},
}
write_json(verification_path, verification)
return verification
# ── NEW 7: Repo Watchdog ─────────────────────────────────────────────