Compare commits

..

1 Commits

Author SHA1 Message Date
1bc9b1e7f8 fix: Standardize training Makefile on python3
Some checks failed
Architecture Lint / Linter Tests (pull_request) Successful in 38s
Smoke Test / smoke (pull_request) Failing after 33s
Validate Config / YAML Lint (pull_request) Failing after 25s
Validate Config / JSON Validate (pull_request) Successful in 23s
Validate Config / Python Syntax & Import Check (pull_request) Failing after 1m25s
Validate Config / Shell Script Lint (pull_request) Failing after 39s
Validate Config / Cron Syntax Check (pull_request) Successful in 18s
Validate Config / Deploy Script Dry Run (pull_request) Successful in 19s
Validate Config / Playbook Schema Validation (pull_request) Successful in 34s
PR Checklist / pr-checklist (pull_request) Failing after 11m17s
Architecture Lint / Lint Repository (pull_request) Has been cancelled
Validate Config / Python Test Suite (pull_request) Has been cancelled
- Add PYTHON ?= python3 variable for portability
- Replace all bare 'python' calls with $(PYTHON)
- Fixes 'make: python: No such file or directory' on systems without python shim
- Addresses issue #660
2026-04-15 02:31:48 +00:00
2 changed files with 7 additions and 67 deletions

View File

@@ -25,11 +25,9 @@ Usage:
result = evaluate_candidate(scores_path, baseline_path, candidate_id)
"""
import glob
import json
import os
import sys
from datetime import datetime, timedelta, timezone
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional
@@ -65,10 +63,6 @@ MAX_METRIC_REGRESSION = -0.15
# Default paths
DEFAULT_GATE_DIR = Path.home() / ".timmy" / "training-data" / "eval-gates"
# Gate file rotation settings (fixes #628: hash dedup growth)
GATE_FILE_MAX_AGE_DAYS = 7 # Delete gate files older than this
GATE_FILE_MAX_COUNT = 50 # Keep at most this many gate files (excluding latest)
def evaluate_candidate(
scores_path: str | Path,
@@ -245,9 +239,6 @@ def evaluate_candidate(
latest_file = gate_dir / "eval_gate_latest.json"
latest_file.write_text(json.dumps(result, indent=2))
# Rotate old gate files to prevent unbounded growth (#628)
_rotate_gate_files(gate_dir)
return result
@@ -296,58 +287,6 @@ def _find_category_score(
return None
def _rotate_gate_files(gate_dir: Path) -> int:
    """Rotate and clean up old eval gate files.

    Keeps the gate directory bounded (fixes #628) by:
      1. deleting files older than GATE_FILE_MAX_AGE_DAYS,
      2. trimming the survivors down to at most GATE_FILE_MAX_COUNT,
      3. never touching eval_gate_latest.json.

    Args:
        gate_dir: Directory containing ``eval_gate_*.json`` files.

    Returns:
        The number of files deleted.
    """
    if not gate_dir.exists():
        return 0

    removed = 0
    age_limit = datetime.now(timezone.utc) - timedelta(days=GATE_FILE_MAX_AGE_DAYS)

    # Collect rotation candidates; the "latest" pointer file is exempt.
    candidates = [
        path
        for path in glob.glob(str(gate_dir / "eval_gate_*.json"))
        if not path.endswith("eval_gate_latest.json")
    ]
    candidates.sort(key=os.path.getmtime)  # oldest first

    # Pass 1: drop anything past the age limit.
    for path in candidates:
        try:
            stamp = datetime.fromtimestamp(os.path.getmtime(path), tz=timezone.utc)
            if stamp < age_limit:
                os.remove(path)
                removed += 1
                continue
        except OSError:
            # Best-effort cleanup: a vanished/unreadable file is just skipped.
            pass

    # Pass 2: cap how many historical files survive, oldest deleted first.
    survivors = [path for path in candidates if os.path.exists(path)]
    overflow = len(survivors) - GATE_FILE_MAX_COUNT
    if overflow > 0:
        for path in survivors[:overflow]:
            try:
                os.remove(path)
                removed += 1
            except OSError:
                pass

    return removed
# ── CLI ──────────────────────────────────────────────────────────────
def main():

View File

@@ -16,6 +16,7 @@ MODEL ?= timmy:v0.1-q4
BASELINE ?= hermes3:latest
OLLAMA_URL ?= http://localhost:11434
OUTPUT ?= output
PYTHON ?= python3
# ── Training ──────────────────────────────────────────────────────────
@@ -23,7 +24,7 @@ train-cloud: ## QLoRA fine-tune on cloud GPU (Axolotl)
axolotl train axolotl.yaml
train-local: ## LoRA fine-tune on Apple Silicon (MLX)
python -m mlx_lm.lora --config mlx-lora.yaml
$(PYTHON) -m mlx_lm.lora --config mlx-lora.yaml
# ── Evaluation ────────────────────────────────────────────────────────
@@ -45,7 +46,7 @@ vibes: ## Run vibes check — hand-picked prompts, human review
@echo "Date: $$(date '+%Y-%m-%d %H:%M')" > $(OUTPUT)/vibes-$(MODEL).md
@echo "Model: $(MODEL)" >> $(OUTPUT)/vibes-$(MODEL).md
@echo "" >> $(OUTPUT)/vibes-$(MODEL).md
@python -c "\
@$(PYTHON) -c "\
import yaml, subprocess, sys; \
prompts = yaml.safe_load(open('data/prompts_vibes.yaml'))['prompts']; \
f = open('$(OUTPUT)/vibes-$(MODEL).md', 'a'); \
@@ -69,19 +70,19 @@ vibes: ## Run vibes check — hand-picked prompts, human review
# ── Data Pipeline ─────────────────────────────────────────────────────
ingest: ## Pull heartbeat trajectories into training data
python ingest_trajectories.py \
$(PYTHON) ingest_trajectories.py \
--trajectories ~/.nexus/trajectories/ \
--curated data/curated_dataset.jsonl \
--output data/merged_training_data.jsonl
@echo "Merged dataset ready. Convert for MLX with: make convert"
curated: ## Regenerate curated exemplar dataset
python build_curated.py
$(PYTHON) build_curated.py
@echo "Curated dataset regenerated."
convert: ## Convert merged dataset to MLX format (train/valid split)
@mkdir -p data/mlx_curated
python -c "\
$(PYTHON) -c "\
import json; \
lines = open('data/merged_training_data.jsonl').readlines(); \
sessions = [json.loads(l) for l in lines]; \