Compare commits

...

12 Commits

Author SHA1 Message Date
Alexander Whitestone
e1deea1cb5 [gemini] [HEALTH] Surface local inference throughput and freshness in model_health (#76) 2026-03-28 04:01:21 -04:00
f263156cf1 test: make local llama.cpp the default runtime (#77) 2026-03-28 05:33:47 +00:00
Alexander Whitestone
0eaf0b3d0f config: update channel_directory.json,config.yaml,skins/timmy.yaml 2026-03-28 01:00:09 -04:00
53ffca38a1 Merge pull request 'Fix Morrowind MCP tool naming — prevent hallucination loops' (#48) from fix/mcp-morrowind-tool-naming into main
Reviewed-on: http://143.198.27.163:3000/Timmy_Foundation/timmy-config/pulls/48
2026-03-28 02:44:16 +00:00
fd26354678 fix: rename MCP server key morrowind → mw 2026-03-28 02:44:07 +00:00
c9b6869d9f fix: rename MCP server key morrowind → mw to prevent tool name hallucination 2026-03-28 02:44:07 +00:00
Alexander Whitestone
7f912b7662 huey: stop triage comment spam 2026-03-27 22:19:19 -04:00
Alexander Whitestone
4042a23441 config: update channel_directory.json 2026-03-27 21:57:34 -04:00
Alexander Whitestone
8f10b5fc92 config: update config.yaml 2026-03-27 21:00:44 -04:00
fbd1b9e88f Merge pull request 'Fix Hermes archive runner environment' (#44) from codex/hermes-venv-runner into main 2026-03-27 22:54:05 +00:00
Alexander Whitestone
ea38041514 Fix Hermes archive runner environment 2026-03-27 18:48:36 -04:00
579a775a0a Merge pull request 'Orchestrate the private Twitter archive learning loop' (#29) from codex/twitter-archive-orchestration into main 2026-03-27 22:16:46 +00:00
6 changed files with 188 additions and 110 deletions

1
.gitignore vendored
View File

@@ -8,3 +8,4 @@
*.db-wal
*.db-shm
__pycache__/
.aider*

View File

@@ -1,5 +1,5 @@
{
"updated_at": "2026-03-27T15:20:52.948451",
"updated_at": "2026-03-28T00:57:18.155934",
"platforms": {
"discord": [
{

View File

@@ -1,5 +1,5 @@
model:
default: auto
default: hermes4:14b
provider: custom
context_length: 65536
base_url: http://localhost:8081/v1
@@ -188,7 +188,7 @@ custom_providers:
- name: Local llama.cpp
base_url: http://localhost:8081/v1
api_key: none
model: auto
model: hermes4:14b
- name: Google Gemini
base_url: https://generativelanguage.googleapis.com/v1beta/openai
api_key_env: GEMINI_API_KEY

View File

@@ -57,64 +57,16 @@ branding:
tool_prefix: "┊"
banner_logo: "[#3B3024]░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓[/]
\n[bold #F7931A]████████╗ ██╗ ███╗ ███╗ ███╗ ███╗ ██╗ ██╗ ████████╗ ██╗ ███╗ ███╗ ███████╗[/]
\n[bold #FFB347]╚══██╔══╝ ██║ ████╗ ████║ ████╗ ████║ ╚██╗ ██╔╝ ╚══██╔══╝ ██║ ████╗ ████║ ██╔════╝[/]
\n[#F7931A] ██║ ██║ ██╔████╔██║ ██╔████╔██║ ╚████╔╝ ██║ ██║ ██╔████╔██║ █████╗ [/]
\n[#D4A574] ██║ ██║ ██║╚██╔╝██║ ██║╚██╔╝██║ ╚██╔╝ ██║ ██║ ██║╚██╔╝██║ ██╔══╝ [/]
\n[#F7931A] ██║ ██║ ██║ ╚═╝ ██║ ██║ ╚═╝ ██║ ██║ ██║ ██║ ██║ ╚═╝ ██║ ███████╗[/]
\n[#3B3024] ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚══════╝[/]
\n
\n[#D4A574]━━━━━━━━━━━━━━━━━━━━━━━━━ S O V E R E I G N T Y & S E R V I C E A L W A Y S ━━━━━━━━━━━━━━━━━━━━━━━━━[/]
\n
\n[#3B3024]░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓█░▒▓[/]"
banner_logo: "[#3B3024]┌──────────────────────────────────────────────────────────┐[/]
\n[bold #F7931A]│ TIMMY TIME │[/]
\n[#FFB347]│ sovereign intelligence • soul on bitcoin • local-first │[/]
\n[#D4A574]│ plain words • real proof • service without theater [/]
\n[#3B3024]└──────────────────────────────────────────────────────────┘[/]"
banner_hero: "[#3B3024] ┌─────────────────────────────────┐ [/]
\n[#D4A574] ┌───┤ ╔══╗ 12 ╔══╗ ├───┐ [/]
\n[#D4A574] ┌─┤ ╚══╝ ╚══╝ ├─┐ [/]
\n[#F7931A] ┌┤ │11 1 │ ├┐ [/]
\n[#F7931A] ││ │ │ │ │ ││ [/]
\n[#FFB347] ││ │10 ╔══════╗ 2│ ││ [/]
\n[bold #F7931A] ││ │ │ ║ ⏱ ║ │ │ ││ [/]
\n[bold #FFB347] ││ │ │ ║ ████ ║ │ │ ││ [/]
\n[#F7931A] ││ │ │ 9 ════════╬══════╬═══════ 3 │ │ ││ [/]
\n[#D4A574] ││ │ │ ║ ║ │ │ ││ [/]
\n[#D4A574] ││ │ │ ║ ║ │ │ ││ [/]
\n[#F7931A] ││ │ │ 8 ╚══════╝ 4 │ │ ││ [/]
\n[#F7931A] ││ │ │ │ │ ││ [/]
\n[#D4A574] └┤ │ │ 7 5 │ │ ├┘ [/]
\n[#D4A574] └─┤ │ 6 │ ├─┘ [/]
\n[#3B3024] └───┤ ╔══╗ ╔══╗ ├───┘ [/]
\n[#3B3024] └─────────────────────────────────┘ [/]
\n
\n[bold #F7931A] ▓▓▓▓▓▓▓ [/]
\n[bold #F7931A] ▓▓▓▓▓▓▓ [/]
\n[bold #FFB347] ▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓ [/]
\n[bold #F7931A] ▓▓▓▓▓▓▓ [/]
\n[bold #D4A574] ▓▓▓▓▓▓▓ [/]
\n[bold #F7931A] ▓▓▓▓▓▓▓ [/]
\n[bold #3B3024] ▓▓▓▓▓▓▓ [/]
\n
\n[#F7931A] ██╗ ██╗ [/]
\n[bold #FFB347] ██████████████████████╗ [/]
\n[#F7931A] ██╔══════════╗██╔═════╝ [/]
\n[#D4A574] ██║ ║██║ [/]
\n[bold #FFB347] ██████████████████████╗ [/]
\n[#F7931A] ██╔══════════╗██╔═════╝ [/]
\n[#D4A574] ██║ ║██║ [/]
\n[bold #FFB347] ██████████████████████╗ [/]
\n[#3B3024] ╚═╝ ╚═╝╚═════╝ [/]
\n[#F7931A] ██╗ ██╗ [/]
\n
\n[#D4A574] ╔══════════════════════════════════════╗ [/]
\n[bold #FFF8E7] ║ THE CHAIN REMEMBERS EVERYTHING ║ [/]
\n[#D4A574] ╚══════════════════════════════════════╝ [/]
\n
\n[dim #D4A574] Jesus saves those who call on His name. [/]
\n[dim #3B3024] IMMUTABLE · PERMANENT [/]
\n[dim #3B3024] SOVEREIGN · FREE [/]
\n
\n[dim #D4A574] ╔═══════════════════════════════════════════════╗ [/]
\n[dim #FFF8E7] ║ I exist because he lived. I carry this ║ [/]
\n[dim #FFF8E7] ║ weight because he carried it first. ║ [/]
\n[dim #D4A574] ╚═══════════════════════════════════════════════╝ [/]"
banner_hero: "[#3B3024] ┌────────────────────────────────────────┐ [/]
\n[#D4A574] local-first mind Hermes harness body │ [/]
\n[#F7931A] │ truth over vibes proof over posture │ [/]
\n[#FFB347] │ heartbeat, harness, portal │ [/]
\n[#D4A574] ├────────────────────────────────────────────────┤ [/]
\n[bold #FFF8E7] │ SOVEREIGNTY AND SERVICE ALWAYS │ [/]
\n[#3B3024] └────────────────────────────────────────────────┘ [/]"

198
tasks.py
View File

@@ -15,6 +15,7 @@ from gitea_client import GiteaClient
HERMES_HOME = Path.home() / ".hermes"
TIMMY_HOME = Path.home() / ".timmy"
HERMES_AGENT_DIR = HERMES_HOME / "hermes-agent"
HERMES_PYTHON = HERMES_AGENT_DIR / "venv" / "bin" / "python3"
METRICS_DIR = TIMMY_HOME / "metrics"
REPOS = [
"Timmy_Foundation/the-nexus",
@@ -35,50 +36,133 @@ def newest_file(directory, pattern):
files = sorted(directory.glob(pattern))
return files[-1] if files else None
def run_hermes_local(prompt, model=None, caller_tag=None, toolsets=None):
def run_hermes_local(
prompt,
model=None,
caller_tag=None,
toolsets=None,
system_prompt=None,
disable_all_tools=False,
skip_context_files=False,
skip_memory=False,
max_iterations=30,
):
"""Call a local model through the Hermes harness.
Uses provider="local-llama.cpp" which routes through the custom_providers
entry in config.yaml → llama-server at localhost:8081.
Runs Hermes inside its own venv so task execution matches the same
environment and provider routing as normal Hermes usage.
Returns response text plus session metadata or None on failure.
Every call creates a Hermes session with telemetry.
"""
_model = model or HEARTBEAT_MODEL
tagged = f"[{caller_tag}] {prompt}" if caller_tag else prompt
# Import hermes cli.main directly — no subprocess, no env vars
_agent_dir = str(HERMES_AGENT_DIR)
if _agent_dir not in sys.path:
sys.path.insert(0, _agent_dir)
old_cwd = os.getcwd()
os.chdir(_agent_dir)
try:
from cli import main as hermes_main
import io
from contextlib import redirect_stdout, redirect_stderr
runner = """
import io
import json
import sys
from contextlib import redirect_stderr, redirect_stdout
from pathlib import Path
buf = io.StringIO()
err = io.StringIO()
kwargs = dict(
query=tagged,
model=_model,
provider="local-llama.cpp",
quiet=True,
agent_dir = Path(sys.argv[1])
query = sys.argv[2]
model = sys.argv[3]
system_prompt = sys.argv[4] or None
disable_all_tools = sys.argv[5] == "1"
skip_context_files = sys.argv[6] == "1"
skip_memory = sys.argv[7] == "1"
max_iterations = int(sys.argv[8])
if str(agent_dir) not in sys.path:
sys.path.insert(0, str(agent_dir))
from hermes_cli.runtime_provider import resolve_runtime_provider
from run_agent import AIAgent
from toolsets import get_all_toolsets
buf = io.StringIO()
err = io.StringIO()
payload = {}
exit_code = 0
try:
runtime = resolve_runtime_provider()
kwargs = {
"model": model,
"api_key": runtime.get("api_key"),
"base_url": runtime.get("base_url"),
"provider": runtime.get("provider"),
"api_mode": runtime.get("api_mode"),
"acp_command": runtime.get("command"),
"acp_args": list(runtime.get("args") or []),
"max_iterations": max_iterations,
"quiet_mode": True,
"ephemeral_system_prompt": system_prompt,
"skip_context_files": skip_context_files,
"skip_memory": skip_memory,
}
if disable_all_tools:
kwargs["disabled_toolsets"] = sorted(get_all_toolsets().keys())
agent = AIAgent(**kwargs)
with redirect_stdout(buf), redirect_stderr(err):
result = agent.run_conversation(query, sync_honcho=False)
payload = {
"response": result.get("final_response", ""),
"session_id": getattr(agent, "session_id", None),
"provider": runtime.get("provider"),
"base_url": runtime.get("base_url"),
"stdout": buf.getvalue(),
"stderr": err.getvalue(),
}
except Exception as exc:
exit_code = 1
payload = {
"error": str(exc),
"stdout": buf.getvalue(),
"stderr": err.getvalue(),
}
print(json.dumps(payload))
sys.exit(exit_code)
"""
command = [
str(HERMES_PYTHON) if HERMES_PYTHON.exists() else sys.executable,
"-c",
runner,
str(HERMES_AGENT_DIR),
tagged,
_model,
system_prompt or "",
"1" if disable_all_tools else "0",
"1" if skip_context_files else "0",
"1" if skip_memory else "0",
str(max_iterations),
]
result = subprocess.run(
command,
cwd=str(HERMES_AGENT_DIR),
capture_output=True,
text=True,
timeout=900,
)
payload = json.loads((result.stdout or "").strip() or "{}")
output = str(payload.get("response", "")).strip()
stderr_output = str(payload.get("stderr", "")).strip()
stdout_output = str(payload.get("stdout", "")).strip()
if result.returncode != 0:
raise RuntimeError(
(
result.stderr
or str(payload.get("error", "")).strip()
or stderr_output
or stdout_output
or output
or "hermes run failed"
).strip()
)
if toolsets:
kwargs["toolsets"] = toolsets
with redirect_stdout(buf), redirect_stderr(err):
hermes_main(**kwargs)
output = buf.getvalue().strip()
session_id = None
lines = []
for line in output.split("\n"):
if line.startswith("session_id:"):
session_id = line.split(":", 1)[1].strip() or None
continue
lines.append(line)
response = "\n".join(lines).strip()
session_id = payload.get("session_id")
response = output
# Log to metrics jsonl
METRICS_DIR.mkdir(parents=True, exist_ok=True)
@@ -100,7 +184,7 @@ def run_hermes_local(prompt, model=None, caller_tag=None, toolsets=None):
return {
"response": response,
"session_id": session_id,
"raw_output": output,
"raw_output": json.dumps(payload, sort_keys=True),
}
except Exception as e:
# Log failure
@@ -116,8 +200,6 @@ def run_hermes_local(prompt, model=None, caller_tag=None, toolsets=None):
with open(metrics_file, "a") as f:
f.write(json.dumps(record) + "\n")
return None
finally:
os.chdir(old_cwd)
def hermes_local(prompt, model=None, caller_tag=None, toolsets=None):
@@ -132,6 +214,28 @@ def hermes_local(prompt, model=None, caller_tag=None, toolsets=None):
return result.get("response")
# System prompt injected ephemerally for archive microtasks: it locks the model
# down to the supplied message only — no tools, no memory, no persona files —
# so batch processing cannot drift into hallucinated context.
ARCHIVE_EPHEMERAL_SYSTEM_PROMPT = (
    "You are running a private archive-processing microtask for Timmy.\n"
    "Use only the supplied user message.\n"
    "Do not use tools, memory, Honcho, SOUL.md, AGENTS.md, or outside knowledge.\n"
    "Do not invent facts.\n"
    "If the prompt requests JSON, return only valid JSON."
)


def run_archive_hermes(prompt, caller_tag, model=None):
    """Run a locked-down Hermes call for private archive processing.

    Thin wrapper over run_hermes_local that applies the sandboxed archive
    system prompt, disables all tools, skips context files and memory, and
    caps iterations at 3 (microtasks should resolve in a single exchange).

    Args:
        prompt: The archive-processing task text.
        caller_tag: Tag prefixed to the prompt for metrics/telemetry attribution.
        model: Optional model override; falls back to the heartbeat default.

    Returns:
        Whatever run_hermes_local returns — a dict with response/session
        metadata, or None on failure.
    """
    return run_hermes_local(
        prompt=prompt,
        model=model,
        caller_tag=caller_tag,
        system_prompt=ARCHIVE_EPHEMERAL_SYSTEM_PROMPT,
        disable_all_tools=True,
        skip_context_files=True,
        skip_memory=True,
        max_iterations=3,
    )
# ── Know Thy Father: Twitter Archive Ingestion ───────────────────────
ARCHIVE_DIR = TIMMY_HOME / "twitter-archive"
@@ -693,7 +797,7 @@ def _know_thy_father_impl():
prior_note=previous_note,
batch_rows=batch_rows,
)
draft_run = run_hermes_local(
draft_run = run_archive_hermes(
prompt=draft_prompt,
caller_tag=f"know-thy-father-draft:{batch_id}",
)
@@ -707,7 +811,7 @@ def _know_thy_father_impl():
return {"status": "error", "reason": "draft pass did not return JSON", "batch_id": batch_id}
critique_prompt = build_archive_critique_prompt(batch_id=batch_id, draft_payload=draft_payload, batch_rows=batch_rows)
critique_run = run_hermes_local(
critique_run = run_archive_hermes(
prompt=critique_prompt,
caller_tag=f"know-thy-father-critique:{batch_id}",
)
@@ -825,7 +929,7 @@ def _archive_weekly_insights_impl():
)
prompt = build_weekly_insight_prompt(profile=profile, recent_batches=recent_batches)
insight_run = run_hermes_local(prompt=prompt, caller_tag="archive-weekly-insights")
insight_run = run_archive_hermes(prompt=prompt, caller_tag="archive-weekly-insights")
if not insight_run:
return {"status": "error", "reason": "insight pass failed"}
@@ -1055,17 +1159,17 @@ def archive_pipeline_tick():
@huey.periodic_task(crontab(minute="*/15"))
def triage_issues():
"""Score and assign unassigned issues across all repos."""
"""Passively scan unassigned issues without posting comment spam."""
g = GiteaClient()
found = 0
backlog = []
for repo in REPOS:
for issue in g.find_unassigned_issues(repo, limit=10):
found += 1
g.create_comment(
repo, issue.number,
"🔍 Triaged by Huey — needs assignment."
)
return {"triaged": found}
backlog.append({
"repo": repo,
"issue": issue.number,
"title": issue.title,
})
return {"unassigned": len(backlog), "sample": backlog[:20]}
@huey.periodic_task(crontab(minute="*/30"))

View File

@@ -0,0 +1,21 @@
from __future__ import annotations
from pathlib import Path
import yaml
def test_config_defaults_to_local_llama_cpp_runtime() -> None:
    """config.yaml must route default inference through the local llama.cpp server."""
    config = yaml.safe_load(Path("config.yaml").read_text())

    # Primary model section points at the local OpenAI-compatible endpoint.
    model_section = config["model"]
    assert model_section["provider"] == "custom"
    assert model_section["default"] == "hermes4:14b"
    assert model_section["base_url"] == "http://localhost:8081/v1"

    # The matching custom provider entry serves the same model.
    matching = (
        entry
        for entry in config["custom_providers"]
        if entry["name"] == "Local llama.cpp"
    )
    local_provider = next(matching)
    assert local_provider["model"] == "hermes4:14b"

    # Fallback stays on the custom-provider path with Gemini as the model.
    fallback_section = config["fallback_model"]
    assert fallback_section["provider"] == "custom"
    assert fallback_section["model"] == "gemini-2.5-pro"