Compare commits

..

6 Commits

Author SHA1 Message Date
4910b74d62 fix: use python3 in training Makefile for portability
Some checks failed
Architecture Lint / Linter Tests (pull_request) Successful in 35s
Smoke Test / smoke (pull_request) Failing after 16s
Validate Config / YAML Lint (pull_request) Failing after 14s
Validate Config / JSON Validate (pull_request) Successful in 20s
Validate Config / Python Syntax & Import Check (pull_request) Failing after 1m57s
Validate Config / Shell Script Lint (pull_request) Failing after 36s
Validate Config / Cron Syntax Check (pull_request) Successful in 9s
Validate Config / Deploy Script Dry Run (pull_request) Successful in 7s
Validate Config / Playbook Schema Validation (pull_request) Successful in 28s
PR Checklist / pr-checklist (pull_request) Failing after 11m36s
Architecture Lint / Lint Repository (pull_request) Has been cancelled
Validate Config / Python Test Suite (pull_request) Has been cancelled
Refs #680, closes #660
2026-04-15 03:17:57 +00:00
d120526244 fix: add python3 shebang to scripts/visual_pr_reviewer.py (#681) 2026-04-15 02:57:53 +00:00
8596ff761b fix: add python3 shebang to scripts/diagram_meaning_extractor.py (#681) 2026-04-15 02:57:40 +00:00
7553fd4f3e fix: add python3 shebang to scripts/captcha_bypass_handler.py (#681) 2026-04-15 02:57:25 +00:00
71082fe06f fix: add python3 shebang to bin/soul_eval_gate.py (#681) 2026-04-15 02:57:14 +00:00
6d678e938e fix: add python3 shebang to bin/nostr-agent-demo.py (#681) 2026-04-15 02:57:00 +00:00
7 changed files with 12 additions and 53 deletions

View File

@@ -1,3 +1,4 @@
#!/usr/bin/env python3
""" """
Full Nostr agent-to-agent communication demo - FINAL WORKING Full Nostr agent-to-agent communication demo - FINAL WORKING
""" """

View File

@@ -1,3 +1,4 @@
#!/usr/bin/env python3
""" """
Soul Eval Gate — The Conscience of the Training Pipeline Soul Eval Gate — The Conscience of the Training Pipeline

View File

@@ -1,53 +1,6 @@
"""Sovereign orchestration — Huey replaces 3,843 lines of homebrew.""" """Sovereign orchestration — Huey replaces 3,843 lines of homebrew."""
import json from huey import SqliteHuey, crontab
import os
from datetime import datetime, timezone
from pathlib import Path from pathlib import Path
from huey import SqliteHuey, signals
huey = SqliteHuey(filename=str(Path.home() / ".hermes" / "orchestration.db")) huey = SqliteHuey(filename=str(Path.home() / ".hermes" / "orchestration.db"))
# === Token Tracking ===
TOKEN_LOG = Path.home() / ".hermes" / "token_usage.jsonl"
def log_token_usage(task_name, result):
"""Log token usage from a completed pipeline task.
Reads input_tokens/output_tokens from the agent result dict.
Auto-detects pipeline name from task context.
Appends to JSONL for downstream analysis.
"""
if not isinstance(result, dict):
return
input_tokens = result.get("input_tokens", 0)
output_tokens = result.get("output_tokens", 0)
if input_tokens == 0 and output_tokens == 0:
return
# Auto-detect pipeline name from task function name
pipeline = task_name.replace("_task", "").replace("_", "-")
entry = {
"timestamp": datetime.now(timezone.utc).isoformat(),
"pipeline": pipeline,
"input_tokens": input_tokens,
"output_tokens": output_tokens,
"total_tokens": input_tokens + output_tokens,
"task": task_name,
}
TOKEN_LOG.parent.mkdir(parents=True, exist_ok=True)
with open(TOKEN_LOG, "a") as f:
f.write(json.dumps(entry) + "\n")
@huey.signal(signals.SIGNAL_COMPLETE)
def on_task_complete(signal, task, task_value=None, **kwargs):
"""Huey hook: log token usage after each pipeline task completes."""
task_name = getattr(task, "name", "unknown")
log_token_usage(task_name, task_value)

View File

@@ -1,3 +1,4 @@
#!/usr/bin/env python3
import json
from hermes_tools import browser_navigate, browser_vision

View File

@@ -1,3 +1,4 @@
#!/usr/bin/env python3
import json
from hermes_tools import browser_navigate, browser_vision

View File

@@ -1,3 +1,4 @@
#!/usr/bin/env python3
import json
from hermes_tools import browser_navigate, browser_vision

View File

@@ -15,6 +15,7 @@
MODEL ?= timmy:v0.1-q4
BASELINE ?= hermes3:latest
OLLAMA_URL ?= http://localhost:11434
PYTHON ?= python3
OUTPUT ?= output
# ── Training ──────────────────────────────────────────────────────────
@@ -23,7 +24,7 @@ train-cloud: ## QLoRA fine-tune on cloud GPU (Axolotl)
axolotl train axolotl.yaml
train-local: ## LoRA fine-tune on Apple Silicon (MLX)
python -m mlx_lm.lora --config mlx-lora.yaml
$(PYTHON) -m mlx_lm.lora --config mlx-lora.yaml
# ── Evaluation ────────────────────────────────────────────────────────
@@ -45,7 +46,7 @@ vibes: ## Run vibes check — hand-picked prompts, human review
@echo "Date: $$(date '+%Y-%m-%d %H:%M')" > $(OUTPUT)/vibes-$(MODEL).md @echo "Date: $$(date '+%Y-%m-%d %H:%M')" > $(OUTPUT)/vibes-$(MODEL).md
@echo "Model: $(MODEL)" >> $(OUTPUT)/vibes-$(MODEL).md @echo "Model: $(MODEL)" >> $(OUTPUT)/vibes-$(MODEL).md
@echo "" >> $(OUTPUT)/vibes-$(MODEL).md @echo "" >> $(OUTPUT)/vibes-$(MODEL).md
@python -c "\ @$(PYTHON) -c "\
import yaml, subprocess, sys; \ import yaml, subprocess, sys; \
prompts = yaml.safe_load(open('data/prompts_vibes.yaml'))['prompts']; \ prompts = yaml.safe_load(open('data/prompts_vibes.yaml'))['prompts']; \
f = open('$(OUTPUT)/vibes-$(MODEL).md', 'a'); \ f = open('$(OUTPUT)/vibes-$(MODEL).md', 'a'); \
@@ -69,19 +70,19 @@ vibes: ## Run vibes check — hand-picked prompts, human review
# ── Data Pipeline ─────────────────────────────────────────────────────
ingest: ## Pull heartbeat trajectories into training data
python ingest_trajectories.py \
$(PYTHON) ingest_trajectories.py \
--trajectories ~/.nexus/trajectories/ \
--curated data/curated_dataset.jsonl \
--output data/merged_training_data.jsonl
@echo "Merged dataset ready. Convert for MLX with: make convert"
curated: ## Regenerate curated exemplar dataset
python build_curated.py
$(PYTHON) build_curated.py
@echo "Curated dataset regenerated."
convert: ## Convert merged dataset to MLX format (train/valid split)
@mkdir -p data/mlx_curated
python -c "\
$(PYTHON) -c "\
import json; \
lines = open('data/merged_training_data.jsonl').readlines(); \
sessions = [json.loads(l) for l in lines]; \