Compare commits
1 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
12ec1af29f |
@@ -12,6 +12,7 @@ Quick-reference index for common operational tasks across the Timmy Foundation i
|
||||
| Check fleet health | fleet-ops | `python3 scripts/fleet_readiness.py` |
|
||||
| Agent scorecard | fleet-ops | `python3 scripts/agent_scorecard.py` |
|
||||
| View fleet manifest | fleet-ops | `cat manifest.yaml` |
|
||||
| Render unreachable-horizon report | timmy-home | `python3 scripts/unreachable_horizon.py --machine-name 'Apple M3 Max' --memory-gb 36.0 --output docs/UNREACHABLE_HORIZON_1M_MEN.md` |
|
||||
|
||||
## the-nexus (Frontend + Brain)
|
||||
|
||||
|
||||
50
docs/UNREACHABLE_HORIZON_1M_MEN.md
Normal file
50
docs/UNREACHABLE_HORIZON_1M_MEN.md
Normal file
@@ -0,0 +1,50 @@
|
||||
# [UNREACHABLE HORIZON] 1M Men in Crisis — 1 MacBook, 3B Model, 0 Cloud, 0 Latency, Perfect Recall

This horizon matters precisely because it is beyond reach today. The honest move is not to fake victory. The honest move is to name what is already true, what is still impossible, and which direction actually increases sovereignty.

## Current local proof

- Machine: Apple M3 Max
- Memory: 36.0 GiB
- Target local model budget: <= 3.0B parameters
- Target men in crisis: 1,000,000
- Default provider in repo config: `ollama`

## What is already true

- Default inference route is already local-first (`ollama`).
- Model-size budget is inside the horizon (3.0B <= 3.0B).
- Local inference endpoint(s) already exist: http://localhost:11434/v1

## Why the horizon is still unreachable

- Repo still carries remote endpoints, so zero third-party network calls is not yet true: https://8lfr3j47a5r3gn-11434.proxy.runpod.net/v1
- Crisis doctrine is incomplete — the repo does not currently prove the full 988 + gospel line + safety question stack.
- Perfect recall across effectively infinite conversations is not available on a single local machine without loss or externalization.
- Zero latency under load is not physically achievable on one consumer machine serving crisis traffic at scale.
- Flawless crisis response that actually keeps men alive and points them to Jesus is not proven at the target scale.
- Parallel crisis sessions are bounded by local throughput (1) while the horizon demands 1,000,000 concurrent men in need.

## Repo-grounded signals

- Local endpoints detected: http://localhost:11434/v1
- Remote endpoints detected: https://8lfr3j47a5r3gn-11434.proxy.runpod.net/v1

## Crisis doctrine that must not collapse

- Ask first: Are you safe right now?
- Direct them to 988 Suicide & Crisis Lifeline.
- Say plainly: Jesus saves those who call on His name.
- Refuse to let throughput fantasies erase presence with the man in the dark.

## Direction of travel

- Purge every remote endpoint and fallback chain so the repo can truly claim zero cloud dependencies.
- Build bounded, local-first memory tiers that are honest about recall limits instead of pretending to perfect recall.
- Add queueing, prioritization, and human handoff so load spikes fail gracefully instead of silently abandoning the man in the dark.
- Prove crisis-response quality with explicit tests for 'Are you safe right now?', 988, and 'Jesus saves those who call on His name.'
- Treat the horizon as a compass, not a fake acceptance test: every step should increase sovereignty without lying about physics.

## Honest conclusion

One consumer MacBook can move toward this horizon. It cannot honestly claim to have reached it. That is not failure. That is humility tied to physics, memory limits, and the sacred weight of crisis work.
|
||||
253
scripts/unreachable_horizon.py
Normal file
253
scripts/unreachable_horizon.py
Normal file
@@ -0,0 +1,253 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Render the 1M-men-in-crisis unreachable horizon as a grounded report."""
|
||||
|
||||
from __future__ import annotations

import argparse
import json
import os
import platform
import re
from pathlib import Path
from typing import Any
from urllib.parse import urlparse
|
||||
|
||||
|
||||
# Horizon definition: the aspirational target this report measures reality against.
TITLE = "[UNREACHABLE HORIZON] 1M Men in Crisis — 1 MacBook, 3B Model, 0 Cloud, 0 Latency, Perfect Recall"

# Number of concurrent men in crisis the horizon demands support for.
TARGET_USERS = 1_000_000

# Upper bound on the local model size, in billions of parameters.
MAX_MODEL_PARAMS_B = 3.0

# Phrases that must ALL appear in SOUL.md for the crisis protocol to count as present.
SOUL_REQUIRED_LINES = (
    "Are you safe right now?",
    "988",
    "Jesus saves",
)
|
||||
|
||||
|
||||
def _probe_memory_gb() -> float:
|
||||
try:
|
||||
page_size = os.sysconf("SC_PAGE_SIZE")
|
||||
phys_pages = os.sysconf("SC_PHYS_PAGES")
|
||||
return round((page_size * phys_pages) / (1024 ** 3), 1)
|
||||
except (ValueError, OSError, AttributeError):
|
||||
return 0.0
|
||||
|
||||
|
||||
def _probe_machine_name() -> str:
|
||||
machine = platform.machine() or "unknown"
|
||||
system = platform.system() or "unknown"
|
||||
release = platform.release() or "unknown"
|
||||
return f"{system} {machine} ({release})"
|
||||
|
||||
|
||||
def _extract_repo_signals(repo_root: Path) -> dict[str, Any]:
|
||||
config_path = repo_root / "config.yaml"
|
||||
soul_path = repo_root / "SOUL.md"
|
||||
|
||||
default_provider = "unknown"
|
||||
local_endpoints: list[str] = []
|
||||
remote_endpoints: list[str] = []
|
||||
|
||||
if config_path.exists():
|
||||
provider_re = re.compile(r"^\s*provider:\s*['\"]?([^'\"]+)['\"]?\s*$")
|
||||
base_url_re = re.compile(r"^\s*base_url:\s*['\"]?([^'\"]*)['\"]?\s*$")
|
||||
for line in config_path.read_text(encoding="utf-8", errors="replace").splitlines():
|
||||
if default_provider == "unknown":
|
||||
provider_match = provider_re.match(line)
|
||||
if provider_match:
|
||||
default_provider = provider_match.group(1).strip()
|
||||
base_url_match = base_url_re.match(line)
|
||||
if not base_url_match:
|
||||
continue
|
||||
url = base_url_match.group(1).strip()
|
||||
if not url:
|
||||
continue
|
||||
if "localhost" in url or "127.0.0.1" in url:
|
||||
local_endpoints.append(url)
|
||||
else:
|
||||
remote_endpoints.append(url)
|
||||
|
||||
soul_text = soul_path.read_text(encoding="utf-8", errors="replace") if soul_path.exists() else ""
|
||||
crisis_protocol_present = all(line in soul_text for line in SOUL_REQUIRED_LINES)
|
||||
|
||||
return {
|
||||
"default_provider": default_provider,
|
||||
"local_endpoints": sorted(set(local_endpoints)),
|
||||
"remote_endpoints": sorted(set(remote_endpoints)),
|
||||
"crisis_protocol_present": crisis_protocol_present,
|
||||
}
|
||||
|
||||
|
||||
def default_snapshot(repo_root: Path | None = None, *, machine_name: str | None = None, memory_gb: float | None = None, model_params_b: float = 3.0) -> dict[str, Any]:
    """Build the snapshot dict that compute_horizon_status() grades.

    Host facts (machine name, memory) are probed unless supplied explicitly;
    repo facts (provider, endpoints, crisis doctrine) are scraped from
    *repo_root*, which defaults to the checkout containing this script.
    """
    root = repo_root or Path(__file__).resolve().parents[1]
    signals = _extract_repo_signals(root)
    resolved_memory = _probe_memory_gb() if memory_gb is None else memory_gb
    return {
        "machine_name": machine_name or _probe_machine_name(),
        "memory_gb": float(resolved_memory),
        "target_users": TARGET_USERS,
        "model_params_b": float(model_params_b),
        "default_provider": signals["default_provider"],
        "local_endpoints": signals["local_endpoints"],
        "remote_endpoints": signals["remote_endpoints"],
        # Hard physical claims default to False: they must be proven, never assumed.
        "perfect_recall_available": False,
        "zero_latency_under_load": False,
        "crisis_protocol_present": signals["crisis_protocol_present"],
        "crisis_response_proven_at_scale": False,
        "max_parallel_crisis_sessions": 1,
    }
|
||||
|
||||
|
||||
def compute_horizon_status(snapshot: dict[str, Any]) -> dict[str, Any]:
    """Grade *snapshot* against the horizon definition.

    Each observable fact is sorted into either a win ("already true") or a
    named blocker. The returned dict is everything render_markdown() needs;
    ``horizon_reachable`` is True only when no blocker remains.
    """
    blocked: list[str] = []
    proven: list[str] = []

    # Sovereign routing: only these providers count as a local-first default.
    provider = snapshot.get("default_provider", "unknown")
    if provider in {"ollama", "local", "custom"}:
        proven.append(f"Default inference route is already local-first (`{provider}`).")
    else:
        blocked.append(f"Default inference route is not local-first (`{provider}`).")

    # Model-size budget against the fixed parameter ceiling.
    model_params_b = float(snapshot.get("model_params_b", MAX_MODEL_PARAMS_B))
    if model_params_b <= MAX_MODEL_PARAMS_B:
        proven.append(f"Model-size budget is inside the horizon ({model_params_b:.1f}B <= {MAX_MODEL_PARAMS_B:.1f}B).")
    else:
        blocked.append(f"Model-size budget is already blown ({model_params_b:.1f}B > {MAX_MODEL_PARAMS_B:.1f}B).")

    local_endpoints = list(snapshot.get("local_endpoints", []))
    if local_endpoints:
        proven.append(f"Local inference endpoint(s) already exist: {', '.join(local_endpoints)}")
    else:
        blocked.append("No local inference endpoint is wired yet.")

    remote_endpoints = list(snapshot.get("remote_endpoints", []))
    if remote_endpoints:
        blocked.append(f"Repo still carries remote endpoints, so zero third-party network calls is not yet true: {', '.join(remote_endpoints)}")
    else:
        proven.append("No remote inference endpoint was detected in repo config.")

    if snapshot.get("crisis_protocol_present"):
        proven.append("Crisis doctrine is present in SOUL-bearing text: 'Are you safe right now?', 988, and 'Jesus saves'.")
    else:
        blocked.append("Crisis doctrine is incomplete — the repo does not currently prove the full 988 + gospel line + safety question stack.")

    # Physical-limit flags: each blocks unless the snapshot explicitly proves it.
    for flag_key, blocker_text in (
        ("perfect_recall_available", "Perfect recall across effectively infinite conversations is not available on a single local machine without loss or externalization."),
        ("zero_latency_under_load", "Zero latency under load is not physically achievable on one consumer machine serving crisis traffic at scale."),
        ("crisis_response_proven_at_scale", "Flawless crisis response that actually keeps men alive and points them to Jesus is not proven at the target scale."),
    ):
        if not snapshot.get(flag_key):
            blocked.append(blocker_text)

    max_parallel = int(snapshot.get("max_parallel_crisis_sessions", 0))
    target_users = int(snapshot.get("target_users", TARGET_USERS))
    if max_parallel < target_users:
        blocked.append(
            f"Parallel crisis sessions are bounded by local throughput ({max_parallel:,}) while the horizon demands {target_users:,} concurrent men in need."
        )

    direction_of_travel = [
        "Purge every remote endpoint and fallback chain so the repo can truly claim zero cloud dependencies.",
        "Build bounded, local-first memory tiers that are honest about recall limits instead of pretending to perfect recall.",
        "Add queueing, prioritization, and human handoff so load spikes fail gracefully instead of silently abandoning the man in the dark.",
        "Prove crisis-response quality with explicit tests for 'Are you safe right now?', 988, and 'Jesus saves those who call on His name.'",
        "Treat the horizon as a compass, not a fake acceptance test: every step should increase sovereignty without lying about physics.",
    ]

    return {
        "title": TITLE,
        "machine_name": snapshot.get("machine_name", "unknown"),
        "memory_gb": float(snapshot.get("memory_gb", 0.0)),
        "target_users": target_users,
        "model_params_b": model_params_b,
        "default_provider": provider,
        "local_endpoints": local_endpoints,
        "remote_endpoints": remote_endpoints,
        "horizon_reachable": not blocked,
        "already_true": proven,
        "blockers": blocked,
        "direction_of_travel": direction_of_travel,
    }
|
||||
|
||||
|
||||
def render_markdown(status: dict[str, Any]) -> str:
    """Render *status* as the horizon report in Markdown, newline-terminated."""
    out: list[str] = [
        f"# {status['title']}",
        "",
        "This horizon matters precisely because it is beyond reach today. The honest move is not to fake victory. The honest move is to name what is already true, what is still impossible, and which direction actually increases sovereignty.",
        "",
        "## Current local proof",
        "",
        f"- Machine: {status['machine_name']}",
        f"- Memory: {status['memory_gb']:.1f} GiB",
        f"- Target local model budget: <= {MAX_MODEL_PARAMS_B:.1f}B parameters",
        f"- Target men in crisis: {status['target_users']:,}",
        f"- Default provider in repo config: `{status['default_provider']}`",
        "",
        "## What is already true",
        "",
    ]
    # An empty wins list still renders a placeholder bullet.
    out += [f"- {item}" for item in status["already_true"]] or ["- Nothing proven yet."]

    out += ["", "## Why the horizon is still unreachable", ""]
    out += [f"- {item}" for item in status["blockers"]]

    local_line = ", ".join(status["local_endpoints"]) or "none"
    remote_line = ", ".join(status["remote_endpoints"]) or "none"
    out += [
        "",
        "## Repo-grounded signals",
        "",
        f"- Local endpoints detected: {local_line}",
        f"- Remote endpoints detected: {remote_line}",
        "",
        "## Crisis doctrine that must not collapse",
        "",
        "- Ask first: Are you safe right now?",
        "- Direct them to 988 Suicide & Crisis Lifeline.",
        "- Say plainly: Jesus saves those who call on His name.",
        "- Refuse to let throughput fantasies erase presence with the man in the dark.",
        "",
        "## Direction of travel",
        "",
    ]
    out += [f"- {item}" for item in status["direction_of_travel"]]

    out += [
        "",
        "## Honest conclusion",
        "",
        "One consumer MacBook can move toward this horizon. It cannot honestly claim to have reached it. That is not failure. That is humility tied to physics, memory limits, and the sacred weight of crisis work.",
    ]

    return "\n".join(out).rstrip() + "\n"
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entry point: build the snapshot, grade it, and emit the report."""
    parser = argparse.ArgumentParser(description="Render the unreachable horizon report")
    parser.add_argument("--repo-root", default=str(Path(__file__).resolve().parents[1]))
    parser.add_argument("--machine-name", default=None)
    parser.add_argument("--memory-gb", type=float, default=None)
    parser.add_argument("--model-params-b", type=float, default=3.0)
    parser.add_argument("--output", default=None)
    parser.add_argument("--json", action="store_true")
    args = parser.parse_args()

    status = compute_horizon_status(
        default_snapshot(
            Path(args.repo_root).expanduser().resolve(),
            machine_name=args.machine_name,
            memory_gb=args.memory_gb,
            model_params_b=args.model_params_b,
        )
    )
    rendered = json.dumps(status, indent=2) if args.json else render_markdown(status)

    if not args.output:
        # No destination given: dump to stdout.
        print(rendered)
        return
    destination = Path(args.output).expanduser()
    destination.parent.mkdir(parents=True, exist_ok=True)
    destination.write_text(rendered, encoding="utf-8")
    print(f"Horizon report written to {destination}")


if __name__ == "__main__":
    main()
|
||||
91
tests/test_unreachable_horizon.py
Normal file
91
tests/test_unreachable_horizon.py
Normal file
@@ -0,0 +1,91 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import importlib.util
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
# Repo-layout anchors shared by every test below.
ROOT = Path(__file__).resolve().parents[1]
SCRIPT_PATH = ROOT / "scripts" / "unreachable_horizon.py"
DOC_PATH = ROOT / "docs" / "UNREACHABLE_HORIZON_1M_MEN.md"
|
||||
|
||||
|
||||
def _load_module(path: Path, name: str):
    """Import the Python file at *path* as a standalone module named *name*."""
    assert path.exists(), f"missing {path.relative_to(ROOT)}"
    spec = importlib.util.spec_from_file_location(name, path)
    # A loadable spec is a precondition, not a test subject.
    assert spec and spec.loader
    loaded = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(loaded)
    return loaded
|
||||
|
||||
|
||||
def test_compute_horizon_status_flags_physical_and_sovereignty_blockers() -> None:
    """A snapshot with remote endpoints and unmet physical claims must stay blocked."""
    horizon = _load_module(SCRIPT_PATH, "unreachable_horizon")
    snapshot = {
        "machine_name": "Test MacBook",
        "memory_gb": 36.0,
        "target_users": 1_000_000,
        "model_params_b": 3.0,
        "default_provider": "ollama",
        "local_endpoints": ["http://localhost:11434/v1"],
        "remote_endpoints": ["https://example.com/v1"],
        "perfect_recall_available": False,
        "zero_latency_under_load": False,
        "crisis_protocol_present": True,
        "crisis_response_proven_at_scale": False,
        "max_parallel_crisis_sessions": 1,
    }
    status = horizon.compute_horizon_status(snapshot)

    assert status["horizon_reachable"] is False
    lowered = [item.lower() for item in status["blockers"]]
    for needle in ("remote endpoints", "perfect recall", "zero latency", "parallel crisis sessions"):
        assert any(needle in item for item in lowered)
|
||||
|
||||
|
||||
def test_render_markdown_preserves_crisis_doctrine_and_direction() -> None:
    """Every section header and doctrine line must survive rendering."""
    horizon = _load_module(SCRIPT_PATH, "unreachable_horizon")
    snapshot = {
        "machine_name": "Apple M3 Max",
        "memory_gb": 36.0,
        "target_users": 1_000_000,
        "model_params_b": 3.0,
        "default_provider": "ollama",
        "local_endpoints": ["http://localhost:11434/v1"],
        "remote_endpoints": [],
        "perfect_recall_available": False,
        "zero_latency_under_load": False,
        "crisis_protocol_present": True,
        "crisis_response_proven_at_scale": False,
        "max_parallel_crisis_sessions": 1,
    }
    report = horizon.render_markdown(horizon.compute_horizon_status(snapshot))

    required_snippets = (
        "# [UNREACHABLE HORIZON] 1M Men in Crisis",
        "## What is already true",
        "## Why the horizon is still unreachable",
        "## Crisis doctrine that must not collapse",
        "Are you safe right now?",
        "988",
        "Jesus saves those who call on His name.",
        "## Direction of travel",
    )
    for snippet in required_snippets:
        assert snippet in report
|
||||
|
||||
|
||||
def test_repo_contains_committed_unreachable_horizon_doc() -> None:
    """The horizon report must be committed, not merely renderable on demand."""
    assert DOC_PATH.exists(), "missing committed unreachable horizon report"
    doc_text = DOC_PATH.read_text(encoding="utf-8")
    required_headers = (
        "# [UNREACHABLE HORIZON] 1M Men in Crisis",
        "## Current local proof",
        "## What is already true",
        "## Why the horizon is still unreachable",
        "## Direction of travel",
    )
    for header in required_headers:
        assert header in doc_text
|
||||
Reference in New Issue
Block a user