Compare commits

..

1 Commits

Author SHA1 Message Date
ceec22a1e3 fix: add python3 shebang to bin/glitch_patterns.py (#681)
Some checks failed
Architecture Lint / Linter Tests (pull_request) Successful in 39s
PR Checklist / pr-checklist (pull_request) Failing after 7m29s
Architecture Lint / Lint Repository (pull_request) Has been cancelled
2026-04-15 02:56:53 +00:00
2 changed files with 2 additions and 48 deletions

View File

@@ -1,3 +1,4 @@
#!/usr/bin/env python3
"""
Glitch pattern definitions for 3D world anomaly detection.

View File

@@ -1,53 +1,6 @@
"""Sovereign orchestration — Huey replaces 3,843 lines of homebrew."""
import json
import os
from datetime import datetime, timezone
from huey import SqliteHuey, crontab
from pathlib import Path
from huey import SqliteHuey, signals
# Single shared Huey instance backed by SQLite; every pipeline task in this
# module registers against this queue.  The DB lives under ~/.hermes.
huey = SqliteHuey(filename=str(Path.home() / ".hermes" / "orchestration.db"))
# === Token Tracking ===
# JSONL file accumulating one usage record per completed pipeline task.
TOKEN_LOG = Path.home() / ".hermes" / "token_usage.jsonl"
def log_token_usage(task_name: str, result) -> None:
    """Append a token-usage record for a completed pipeline task to TOKEN_LOG.

    Reads ``input_tokens`` / ``output_tokens`` from the agent result dict and
    writes one JSON line for downstream analysis.  Silently does nothing when
    *result* is not a dict or reports zero tokens, so best-effort callers
    (signal hooks) never crash the pipeline.

    Args:
        task_name: Huey task function name, e.g. ``"daily_digest_task"``.
        result: Task return value; expected to be a dict that may carry
            ``input_tokens`` and ``output_tokens`` counts.
    """
    if not isinstance(result, dict):
        return
    input_tokens = result.get("input_tokens", 0)
    output_tokens = result.get("output_tokens", 0)
    # Nothing to record — avoid writing empty entries.
    if input_tokens == 0 and output_tokens == 0:
        return
    # Derive a pipeline name from the task function name,
    # e.g. "daily_digest_task" -> "daily-digest".
    pipeline = task_name.replace("_task", "").replace("_", "-")
    entry = {
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "pipeline": pipeline,
        "input_tokens": input_tokens,
        "output_tokens": output_tokens,
        "total_tokens": input_tokens + output_tokens,
        "task": task_name,
    }
    TOKEN_LOG.parent.mkdir(parents=True, exist_ok=True)
    # Explicit UTF-8: the default encoding is locale-dependent and could
    # corrupt non-ASCII payloads in the JSONL log.
    with open(TOKEN_LOG, "a", encoding="utf-8") as f:
        f.write(json.dumps(entry) + "\n")
@huey.signal(signals.SIGNAL_COMPLETE)
def on_task_complete(signal, task, task_value=None, **kwargs):
    """Signal handler fired on task completion; records token usage."""
    log_token_usage(getattr(task, "name", "unknown"), task_value)