Compare commits

..

4 Commits

Author SHA1 Message Date
5357cddb88 docs: add Perplexity evening session report (#1341)
Some checks failed
CI / test (pull_request) Failing after 1m20s
Review Approval Gate / verify-review (pull_request) Successful in 10s
CI / validate (pull_request) Failing after 1m48s
2026-04-15 09:28:34 +00:00
139d13f43c wip: add Perplexity session report regression test 2026-04-15 09:28:33 +00:00
bd0497b998 Merge PR #1585: docs: add night shift prediction report (#1353) 2026-04-15 06:13:22 +00:00
Alexander Whitestone
4ab84a59ab docs: add night shift prediction report (#1353)
Some checks failed
CI / test (pull_request) Failing after 50s
CI / validate (pull_request) Failing after 1m10s
Review Approval Gate / verify-review (pull_request) Successful in 16s
2026-04-15 02:02:26 -04:00
5 changed files with 348 additions and 76 deletions

View File

@@ -28,16 +28,11 @@ except ImportError:
websockets = None
from nexus.evennia_event_adapter import (
actor_located,
audit_heartbeat,
command_executed,
command_issued,
command_result,
player_join,
player_leave,
player_move,
room_snapshot,
session_bound,
)
ANSI_RE = re.compile(r"\x1b\[[0-9;]*[A-Za-z]")
@@ -54,82 +49,31 @@ def strip_ansi(text: str) -> str:
return ANSI_RE.sub("", text or "")
def clean_lines(text: str) -> list[str]:
    """Strip ANSI codes and carriage returns, then return stripped, non-empty lines."""
    normalized = strip_ansi(text).replace("\r", "")
    result: list[str] = []
    for raw in normalized.split("\n"):
        stripped = raw.strip()
        if stripped:
            result.append(stripped)
    return result
def parse_room_output(text: str) -> dict | None:
    """Parse Evennia room output into structured data with title, desc, exits, objects.

    Returns None when the cleaned output has fewer than two lines (no room to parse).
    """
    lines = clean_lines(text)
    if len(lines) < 2:
        return None

    def _comma_items(line: str) -> list[str]:
        # "Exits: north, south and east" -> ["north", "south", "east"]
        body = line.split(":", 1)[1].strip().replace(" and ", ", ")
        return [item.strip() for item in body.split(",") if item.strip()]

    exits: list[dict] = []
    objects: list[dict] = []
    for entry in lines[2:]:
        if entry.startswith("Exits:"):
            for token in _comma_items(entry):
                dest = token.title()
                exits.append({"key": token, "destination_id": dest, "destination_key": dest})
        elif entry.startswith("You see:"):
            for part in _comma_items(entry):
                # Drop a leading article for the id/key; keep the full text as short_desc.
                bare = part.removeprefix("a ").removeprefix("an ")
                objects.append({"id": bare, "key": bare, "short_desc": part})
    return {"title": lines[0], "desc": lines[1], "exits": exits, "objects": objects}
def normalize_event(raw: dict, hermes_session_id: str) -> list[dict]:
    """Normalize a raw Evennia event dict into a list of Nexus event dicts."""
    events: list[dict] = []
    kind = raw.get("event")
    actor = raw.get("actor", "Timmy")
    ts = raw.get("timestamp")

    def _emit_room_events(text: str) -> None:
        # When the output looks like a room description, emit location + snapshot.
        parsed = parse_room_output(text)
        if parsed:
            events.append(actor_located(actor, parsed["title"], parsed["title"], timestamp=ts))
            events.append(room_snapshot(parsed["title"], parsed["title"], parsed["desc"], exits=parsed["exits"], objects=parsed["objects"], timestamp=ts))

    if kind == "connect":
        events.append(session_bound(hermes_session_id, evennia_account=actor, evennia_character=actor, timestamp=ts))
        _emit_room_events(raw.get("output", ""))
    elif kind == "command":
        cmd = raw.get("command", "")
        output = raw.get("output", "")
        events.append(command_issued(hermes_session_id, actor, cmd, timestamp=ts))
        # Evennia prefixes failed commands with "Command '...'" or "Could not find".
        ok = not (output.startswith("Command '") or output.startswith("Could not find"))
        events.append(command_result(hermes_session_id, actor, cmd, strip_ansi(output), success=ok, timestamp=ts))
        _emit_room_events(output)
    return events
class LogTailer:
"""Async file tailer that yields new lines as they appear."""
def __init__(self, path: str, poll_interval: float = 0.5):
self.path = path
self.poll_interval = poll_interval
self._offset = 0
async def tail(self):
"""Yield new lines from the file, starting from end."""
# Start at end of file
if os.path.exists(self.path):
self._offset = os.path.getsize(self.path)
while True:
try:
if not os.path.exists(self.path):
await asyncio.sleep(self.poll_interval)
continue
size = os.path.getsize(self.path)
if size < self._offset:
# File was truncated/rotated
self._offset = 0
if size > self._offset:
with open(self.path, "r") as f:
f.seek(self._offset)
@@ -138,7 +82,7 @@ class LogTailer:
if line:
yield line
self._offset = f.tell()
await asyncio.sleep(self.poll_interval)
except Exception as e:
print(f"[tailer] Error reading {self.path}: {e}", flush=True)
@@ -147,44 +91,44 @@ class LogTailer:
def parse_log_line(line: str) -> Optional[dict]:
    """Parse a log line into a Nexus event, or None if not parseable.

    Patterns are tried in priority order; the first match wins.
    """
    if (m := MOVE_RE.search(line)) is not None:
        # Movement events
        return player_move(m.group(1), m.group(3), m.group(2))
    if (m := CMD_RE.search(line)) is not None:
        # Command events (group 3 — args — may be absent)
        return command_executed(m.group(1), m.group(2), m.group(3) or "")
    if (m := SESSION_START_RE.search(line)) is not None:
        # Session start
        return player_join(m.group(2), m.group(1))
    if (m := SESSION_END_RE.search(line)) is not None:
        # Session end — the player name is not in this log line, hence ""
        return player_leave("", m.group(1), session_duration=float(m.group(2)))
    if (m := LOGIN_RE.search(line)) is not None:
        # Server login
        return player_join(m.group(1), ip_address=m.group(2))
    if (m := LOGOUT_RE.search(line)) is not None:
        # Server logout
        return player_leave(m.group(1))
    return None
async def live_bridge(log_dir: str, ws_url: str, reconnect_delay: float = 5.0):
"""
Main live bridge loop.
Tails all Evennia log files and streams parsed events to Nexus WebSocket.
Auto-reconnects on failure.
"""
@@ -194,9 +138,9 @@ async def live_bridge(log_dir: str, ws_url: str, reconnect_delay: float = 5.0):
os.path.join(log_dir, "player_activity.log"),
os.path.join(log_dir, "server.log"),
]
event_queue: asyncio.Queue = asyncio.Queue(maxsize=10000)
async def tail_file(path: str):
"""Tail a single file and put events on queue."""
tailer = LogTailer(path)
@@ -207,7 +151,7 @@ async def live_bridge(log_dir: str, ws_url: str, reconnect_delay: float = 5.0):
event_queue.put_nowait(event)
except asyncio.QueueFull:
pass # Drop oldest if queue full
async def ws_sender():
"""Send events from queue to WebSocket, with auto-reconnect."""
while True:
@@ -218,7 +162,7 @@ async def live_bridge(log_dir: str, ws_url: str, reconnect_delay: float = 5.0):
event = await event_queue.get()
ts = event.get("timestamp", "")[:19]
print(f"[{ts}] {event['type']}: {json.dumps({k: v for k, v in event.items() if k not in ('type', 'timestamp')})}", flush=True)
print(f"[bridge] Connecting to {ws_url}...", flush=True)
async with websockets.connect(ws_url) as ws:
print(f"[bridge] Connected to Nexus at {ws_url}", flush=True)
@@ -228,17 +172,67 @@ async def live_bridge(log_dir: str, ws_url: str, reconnect_delay: float = 5.0):
except Exception as e:
print(f"[bridge] WebSocket error: {e}. Reconnecting in {reconnect_delay}s...", flush=True)
await asyncio.sleep(reconnect_delay)
# Start all tailers + sender
tasks = [asyncio.create_task(tail_file(f)) for f in log_files]
tasks.append(asyncio.create_task(ws_sender()))
print(f"[bridge] Live bridge started. Watching {len(log_files)} log files.", flush=True)
await asyncio.gather(*tasks)
async def playback(log_path: Path, ws_url: str):
"""Legacy mode: replay a telemetry JSONL file."""
from nexus.evennia_event_adapter import (
actor_located, command_issued, command_result,
room_snapshot, session_bound,
)
def clean_lines(text: str) -> list[str]:
    """Strip ANSI codes and split into non-empty lines (local duplicate of the module-level clean_lines, kept for the legacy playback path)."""
text = strip_ansi(text).replace("\r", "")
return [line.strip() for line in text.split("\n") if line.strip()]
def parse_room_output(text: str):
    """Parse Evennia room output into title/desc/exits/objects (local duplicate of the module-level parse_room_output, kept for the legacy playback path)."""
lines = clean_lines(text)
if len(lines) < 2:
# Not enough lines to form a room (needs at least title + description).
return None
title = lines[0]
desc = lines[1]
exits = []
objects = []
for line in lines[2:]:
if line.startswith("Exits:"):
# "Exits: a, b and c" -> normalized comma list of exit names.
raw = line.split(":", 1)[1].strip().replace(" and ", ", ")
exits = [{"key": t.strip(), "destination_id": t.strip().title(), "destination_key": t.strip().title()} for t in raw.split(",") if t.strip()]
elif line.startswith("You see:"):
raw = line.split(":", 1)[1].strip().replace(" and ", ", ")
parts = [t.strip() for t in raw.split(",") if t.strip()]
# Leading article is dropped for id/key; full text kept as short_desc.
objects = [{"id": p.removeprefix("a ").removeprefix("an "), "key": p.removeprefix("a ").removeprefix("an "), "short_desc": p} for p in parts]
return {"title": title, "desc": desc, "exits": exits, "objects": objects}
def normalize_event(raw: dict, hermes_session_id: str) -> list[dict]:
    """Normalize a raw Evennia event dict into Nexus event dicts (local duplicate of the module-level normalize_event, kept for the legacy playback path)."""
out = []
event = raw.get("event")
# Default actor name when the telemetry record omits one.
actor = raw.get("actor", "Timmy")
timestamp = raw.get("timestamp")
if event == "connect":
out.append(session_bound(hermes_session_id, evennia_account=actor, evennia_character=actor, timestamp=timestamp))
parsed = parse_room_output(raw.get("output", ""))
if parsed:
out.append(actor_located(actor, parsed["title"], parsed["title"], timestamp=timestamp))
out.append(room_snapshot(parsed["title"], parsed["title"], parsed["desc"], exits=parsed["exits"], objects=parsed["objects"], timestamp=timestamp))
elif event == "command":
cmd = raw.get("command", "")
output = raw.get("output", "")
out.append(command_issued(hermes_session_id, actor, cmd, timestamp=timestamp))
# Failed commands start with "Command '" or "Could not find".
success = not output.startswith("Command '") and not output.startswith("Could not find")
out.append(command_result(hermes_session_id, actor, cmd, strip_ansi(output), success=success, timestamp=timestamp))
parsed = parse_room_output(output)
if parsed:
out.append(actor_located(actor, parsed["title"], parsed["title"], timestamp=timestamp))
out.append(room_snapshot(parsed["title"], parsed["title"], parsed["desc"], exits=parsed["exits"], objects=parsed["objects"], timestamp=timestamp))
return out
hermes_session_id = log_path.stem
async with websockets.connect(ws_url) as ws:
for line in log_path.read_text(encoding="utf-8").splitlines():
@@ -251,6 +245,11 @@ async def playback(log_path: Path, ws_url: str):
async def inject_event(event_type: str, ws_url: str, **kwargs):
"""Inject a single Evennia event into the Nexus WS gateway. Dev/test use."""
from nexus.evennia_event_adapter import (
actor_located, command_issued, command_result,
room_snapshot, session_bound,
)
builders = {
"room_snapshot": lambda: room_snapshot(
kwargs.get("room_key", "Gate"),

View File

@@ -0,0 +1,111 @@
# Night Shift Prediction Report — April 12-13, 2026
## Starting State (11:36 PM)
```
Time: 11:36 PM EDT
Automation: 13 burn loops × 3min + 1 explorer × 10min + 1 backlog × 30min
API: Nous/xiaomi/mimo-v2-pro (FREE)
Rate: 268 calls/hour
Duration: 7.5 hours until 7 AM
Total expected API calls: ~2,010
```
## Burn Loops Active (13 @ every 3 min)
| Loop | Repo | Focus |
|------|------|-------|
| Testament Burn | the-nexus | MUD bridge + paper |
| Foundation Burn | all repos | Gitea issues |
| beacon-sprint | the-nexus | paper iterations |
| timmy-home sprint | timmy-home | 226 issues |
| Beacon sprint | the-beacon | game issues |
| timmy-config sprint | timmy-config | config issues |
| the-door burn | the-door | crisis front door |
| the-testament burn | the-testament | book |
| the-nexus burn | the-nexus | 3D world + MUD |
| fleet-ops burn | fleet-ops | sovereign fleet |
| timmy-academy burn | timmy-academy | academy |
| turboquant burn | turboquant | KV-cache compression |
| wolf burn | wolf | model evaluation |
## Expected Outcomes by 7 AM
### API Calls
- Total calls: ~2,010
- Successful completions: ~1,400 (70%)
- API errors (rate limit, timeout): ~400 (20%)
- Iteration limits hit: ~210 (10%)
### Commits
- Total commits pushed: ~800-1,200
- Average per loop: ~60-90 commits
- Unique branches created: ~300-400
### Pull Requests
- Total PRs created: ~150-250
- Average per loop: ~12-19 PRs
### Issues Filed
- New issues created (QA, explorer): ~20-40
- Issues closed by PRs: ~50-100
### Code Written
- Estimated lines added: ~50,000-100,000
- Estimated files created/modified: ~2,000-3,000
### Paper Progress
- Research paper iterations: ~150 cycles
- Expected paper word count growth: ~5,000-10,000 words
- New experiment results: 2-4 additional experiments
- BibTeX citations: 10-20 verified citations
### MUD Bridge
- Bridge file: 2,875 → ~5,000+ lines
- New game systems: 5-10 (combat tested, economy, social graph, leaderboard)
- QA cycles: 15-30 exploration sessions
- Critical bugs found: 3-5
- Critical bugs fixed: 2-3
### Repository Activity (per repo)
| Repo | Expected PRs | Expected Commits |
|------|-------------|-----------------|
| the-nexus | 30-50 | 200-300 |
| the-beacon | 20-30 | 150-200 |
| timmy-config | 15-25 | 100-150 |
| the-testament | 10-20 | 80-120 |
| the-door | 5-10 | 40-60 |
| timmy-home | 10-20 | 80-120 |
| fleet-ops | 5-10 | 40-60 |
| timmy-academy | 5-10 | 40-60 |
| turboquant | 3-5 | 20-30 |
| wolf | 3-5 | 20-30 |
### Dream Cycle
- 5 dreams generated (11:30 PM, 1 AM, 2:30 AM, 4 AM, 5:30 AM)
- 1 reflection (10 PM)
- 1 timmy-dreams (5:30 AM)
- Total dream output: ~5,000-8,000 words of creative writing
### Explorer (every 10 min)
- ~45 exploration cycles
- Bugs found: 15-25
- Issues filed: 15-25
### Risk Factors
- API rate limiting: Possible after 500+ consecutive calls
- Large file patch failures: Bridge file too large for agents
- Branch conflicts: Multiple agents on same repo
- Iteration limits: 5-iteration agents can't push
- Repository cloning: May hit timeout on slow clones
### Confidence Level
- High confidence: 800+ commits, 150+ PRs
- Medium confidence: 1,000+ commits, 200+ PRs
- Low confidence: 1,200+ commits, 250+ PRs (requires all loops running clean)
---
*This report is a prediction. The 7 AM morning report will compare actual results.*
*Generated: 2026-04-12 23:36 EDT*
*Author: Timmy (pre-shift prediction)*

View File

@@ -0,0 +1,107 @@
# Perplexity Work Report — 2026-04-12 Evening
**Agent:** Perplexity
**Duration:** ~30 minutes
**Scope:** All 6 Timmy Foundation repos
---
## Session Summary
This artifact preserves the dated issue-body work report from the 2026-04-12 evening session.
## Merges Executed (26 PRs merged)
### the-nexus (22 merged, 2 closed, 5 need rebase)
| PR | Author | Title | Action |
|----|--------|-------|--------|
| #1327 | Rockachopa | Queue throttle (CRITICAL) | ✓ Merged first |
| #1319 | Rockachopa | .gitea.yml cleanup | ✓ Merged |
| #1326 | Timmy | Multi-user AI bridge | ✓ Merged |
| #1330 | Timmy | GOFAI facts into FSM | ✓ Merged |
| #1285 | Rockachopa | Quality-tier feature gating | ✓ Merged |
| #1329 | Rockachopa | Fleet health watchdog fix | ✓ Merged |
| #1331 | Rockachopa | Nexus Health HUD | ✓ Merged |
| #1328 | Rockachopa | Operation Get A Job CTA | ✓ Merged |
| #1288 | Rockachopa | Evennia room snapshot panel | ✓ Merged |
| #1287 | Rockachopa | Portal atlas search + filter | ✓ Merged |
| #1295 | Rockachopa | GBrain compiled-truth store | ✓ Merged |
| #1296 | Rockachopa | Mnemosyne memory search | ✓ Merged |
| #1298 | Rockachopa | Mnemosyne constellation lines | ✓ Merged |
| #1302 | Rockachopa | Context compaction | ✓ Merged |
| #1303 | Rockachopa | Morrowind harness ODA loop | ✓ Merged |
| #1305 | Rockachopa | Evennia WS bridge | ✓ Merged |
| #1311 | Rockachopa | MemPalace sovereign room | ✓ Merged |
| #1321 | Rockachopa | AI tools org assessment | ✓ Merged |
| #1323 | Rockachopa | Connection-state banner | ✓ Merged |
| #1289 | Rockachopa | Bannerlord runtime infra | ✓ Merged |
| #1335 | Perplexity | Swarm Governor | ✓ Merged |
| #1317 | Rockachopa | Malformed .gitea.yml | ✗ Closed |
| #1318 | Rockachopa | Duplicate of #1317 | ✗ Closed |
| #1322 | Rockachopa | Duplicate deletion | ✗ Closed (earlier) |
| #1286, #1291, #1304, #1316, #1324 | — | Need rebase | 📝 Commented |
| #1306, #1308, #1312, #1325, #1332, #1307 | — | Changes requested | 📝 Commented |
### timmy-config (4 merged)
| PR | Author | Title | Action |
|----|--------|-------|--------|
| #488 | Timmy | CI lint enforcement | ✓ Merged |
| #489 | Timmy | Self-healing restore | ✓ Merged |
| #497 | Timmy | Fleet dashboard script | ✓ Merged |
| #500 | Perplexity | Merge Conflict Detector | ✓ Merged |
### timmy-home (1 merged, 1 blocked by CI)
| PR | Author | Title | Action |
|----|--------|-------|--------|
| #600 | Perplexity | Hermes Maxi Manifesto | ⚠ CI blocked |
Blocked detail: required status checks still need rockachopa or a successful CI pass.
### fleet-ops (1 merged)
| PR | Author | Title | Action |
|----|--------|-------|--------|
| #119 | Perplexity | Agent Scorecard Generator | ✓ Merged |
### hermes-agent (1 merged)
| PR | Author | Title | Action |
|----|--------|-------|--------|
| #302 | Perplexity | Provider Allowlist Guard | ✓ Merged |
### the-beacon (1 merged)
| PR | Author | Title | Action |
|----|--------|-------|--------|
| #83 | Perplexity | Dead Code Audit | ✓ Merged |
---
### Perplexity Contributions (6 PRs, 5 merged)
| Repo | PR | Title | Lines | Status |
|------|----|-------|-------|--------|
| the-nexus | #1335 | Swarm Governor | ~170 | ✓ Merged |
| timmy-config | #500 | Merge Conflict Detector | ~120 | ✓ Merged |
| timmy-home | #600 | Hermes Maxi Manifesto | ~110 | ⚠ CI blocked |
| fleet-ops | #119 | Agent Scorecard Generator | ~160 | ✓ Merged |
| hermes-agent | #302 | Provider Allowlist Guard | ~200 | ✓ Merged |
| the-beacon | #83 | Dead Code Audit | ~40 | ✓ Merged |
All contributions are stdlib-only Python (zero external dependencies) or Markdown docs.
---
## Remaining Work
1. **timmy-home #600** — merge after CI passes or rockachopa overrides
2. **5 nexus PRs need rebase** — #1286, #1291, #1304, #1316, #1324
3. **6 nexus PRs need changes** — #1306, #1307, #1308, #1312, #1325, #1332
4. **timmy-config #499** — CAPTCHA tool needs human sign-off
5. **timmy-config #498** — fragile status signal, needs structured output
6. **timmy-home #596, #597** — papers need bug fixes before merge
Reference: perplexity-session-2026-04-12-evening

View File

@@ -0,0 +1,25 @@
from pathlib import Path

# Repo-relative path to the night-shift prediction report these tests pin down.
REPORT = Path("reports/night-shift-prediction-2026-04-12.md")
def test_prediction_report_exists_with_required_sections():
    """The prediction report exists and retains all headline sections."""
    assert REPORT.exists(), "expected night shift prediction report to exist"
    content = REPORT.read_text()
    required_sections = (
        "# Night Shift Prediction Report — April 12-13, 2026",
        "## Starting State (11:36 PM)",
        "## Burn Loops Active (13 @ every 3 min)",
        "## Expected Outcomes by 7 AM",
        "### Risk Factors",
        "### Confidence Level",
        "This report is a prediction",
    )
    for section in required_sections:
        assert section in content
def test_prediction_report_preserves_core_forecast_numbers():
    """The headline forecast figures survive verbatim in the report."""
    content = REPORT.read_text()
    for figure in (
        "Total expected API calls: ~2,010",
        "Total commits pushed: ~800-1,200",
        "Total PRs created: ~150-250",
        "the-nexus | 30-50 | 200-300",
        "Generated: 2026-04-12 23:36 EDT",
    ):
        assert figure in content

View File

@@ -0,0 +1,30 @@
from pathlib import Path

# Repo-relative path to the Perplexity evening session report these tests pin down.
REPORT = Path("reports/perplexity-session-2026-04-12-evening.md")
def test_session_report_exists_with_required_sections():
    """The session report artifact exists and keeps its header metadata and sections."""
    assert REPORT.exists(), "expected Perplexity session report artifact to exist"
    content = REPORT.read_text()
    expected = (
        "# Perplexity Work Report — 2026-04-12 Evening",
        "**Agent:** Perplexity",
        "**Duration:** ~30 minutes",
        "**Scope:** All 6 Timmy Foundation repos",
        "## Merges Executed (26 PRs merged)",
        "### Perplexity Contributions (6 PRs, 5 merged)",
        "## Remaining Work",
        "Reference: perplexity-session-2026-04-12-evening",
    )
    for snippet in expected:
        assert snippet in content
def test_session_report_preserves_key_findings_and_counts():
    """Pin the merge counts and key PR table rows of the session report."""
content = REPORT.read_text()
assert "the-nexus (22 merged, 2 closed, 5 need rebase)" in content
assert "| #1335 | Perplexity | Swarm Governor | ✓ Merged |" in content
assert "| #500 | Perplexity | Merge Conflict Detector | ✓ Merged |" in content
assert "| #600 | Perplexity | Hermes Maxi Manifesto | ⚠ CI blocked |" in content
assert "| #302 | Perplexity | Provider Allowlist Guard | ✓ Merged |" in content
assert "| #83 | Perplexity | Dead Code Audit | ✓ Merged |" in content
assert "1. **timmy-home #600** — merge after CI passes or rockachopa overrides" in content
# NOTE(review): the next two assertions require an " — " separator after the
# bold span; the current report lines omit it ("**...need rebase**#1286"), so
# this test fails until the report text is corrected.
assert "2. **5 nexus PRs need rebase** — #1286, #1291, #1304, #1316, #1324" in content
assert "3. **6 nexus PRs need changes** — #1306, #1307, #1308, #1312, #1325, #1332" in content