Compare commits

..

2 Commits

Author SHA1 Message Date
Alexander Whitestone
f68f110d0e feat: add tower npc relationship graph for #515
Some checks failed
Agent PR Gate / gate (pull_request) Failing after 13s
Self-Healing Smoke / self-healing-smoke (pull_request) Failing after 17s
Smoke Test / smoke (pull_request) Failing after 18s
Agent PR Gate / report (pull_request) Successful in 15s
2026-04-22 02:14:03 -04:00
Alexander Whitestone
289f0410aa test: define tower npc relationships for #515 2026-04-22 02:13:46 -04:00
5 changed files with 174 additions and 267 deletions

View File

@@ -12,6 +12,27 @@ WORLD_DIR = Path('/Users/apayne/.timmy/evennia/timmy_world')
STATE_FILE = WORLD_DIR / 'game_state.json'
TIMMY_LOG = WORLD_DIR / 'timmy_log.md'
# Mutual trust at or above this value records a friendship milestone for an NPC pair.
FRIENDSHIP_THRESHOLD = 0.5
# Mutual trust at or below this value records a tension milestone for an NPC pair.
TENSION_THRESHOLD = -0.5
# Off-screen NPC pairs that converse while the player is elsewhere.
# Per entry:
#   values       - initial trust each member holds toward the other
#   conversation - world-event line appended whenever the pair talks
#   milestone    - one-time summary recorded when the threshold is crossed
#   hint         - flavor text surfaced in the room description afterwards
#   delta        - trust change applied to both sides per conversation
#   kind         - "friendship" or "tension" (selects threshold and state bucket)
NPC_RELATIONSHIP_SEEDS = {
    ("Kimi", "Marcus"): {
        "values": {"Kimi": 0.45, "Marcus": 0.47},
        "conversation": "While you are away, Marcus and Kimi trade a quiet confidence beneath the oak.",
        "milestone": "A friendship starts to take root between Marcus and Kimi.",
        "hint": "Marcus and Kimi move with the easy familiarity of old friends.",
        "delta": 0.08,
        "kind": "friendship",
    },
    ("Bezalel", "ClawCode"): {
        "values": {"Bezalel": -0.46, "ClawCode": -0.44},
        "conversation": "While you are away, Bezalel and ClawCode clash over what the forge is for.",
        "milestone": "Tension hardens between Bezalel and ClawCode at the anvil.",
        "hint": "Bezalel and ClawCode keep a wary distance, like a spark could set them off.",
        "delta": -0.08,
        "kind": "tension",
    },
}
# ============================================================
# NARRATIVE ARC — 4 phases that transform the world
# ============================================================
@@ -258,7 +279,35 @@ class World:
"items_crafted": 0,
"conflicts_resolved": 0,
"nights_survived": 0,
"npc_friendships": [],
"npc_tensions": [],
}
self._initialize_npc_relationships(apply_seeds=True)
def _initialize_npc_relationships(self, apply_seeds=False):
npc_names = [name for name, char in self.characters.items() if not char.get("is_player", False)]
for npc_name in npc_names:
trust_map = self.characters[npc_name]["trust"]
for other_name in npc_names:
if other_name != npc_name:
trust_map.setdefault(other_name, 0.0)
if apply_seeds:
for pair, seed in NPC_RELATIONSHIP_SEEDS.items():
left, right = pair
self.characters[left]["trust"][right] = seed["values"][left]
self.characters[right]["trust"][left] = seed["values"][right]
self.state.setdefault("npc_friendships", [])
self.state.setdefault("npc_tensions", [])
def relationship_hint_for_room(self, room_name, occupants):
    """Return non-empty hint strings for recorded relationship milestones
    anchored to *room_name* whose NPC pair is fully present in *occupants*."""
    present = set(occupants)
    collected = []
    for bucket in ("npc_friendships", "npc_tensions"):
        for record in self.state.get(bucket, []):
            if record.get("room") != room_name:
                continue
            if not set(record.get("pair", [])) <= present:
                continue
            hint = record.get("hint", "")
            if hint:
                collected.append(hint)
    return collected
def tick_time(self):
"""Advance time of day."""
@@ -389,6 +438,8 @@ class World:
here = [n for n, c in self.characters.items() if c["room"] == room_name and n != char_name]
if here:
desc += f"\n Here: {', '.join(here)}"
for hint in self.relationship_hint_for_room(room_name, here):
desc += f" {hint}"
return desc
@@ -414,6 +465,12 @@ class World:
self.rooms = data.get("rooms", self.rooms)
self.characters = data.get("characters", self.characters)
self.state = data.get("state", self.state)
needs_seed = not any(
any(other != "Timmy" for other in char.get("trust", {}))
for name, char in self.characters.items()
if not char.get("is_player", False)
)
self._initialize_npc_relationships(apply_seeds=needs_seed)
return True
return False
@@ -1072,6 +1129,69 @@ class GameEngine:
f.write(f"\n*Began: {datetime.now().strftime('%Y-%m-%d %H:%M')}*\n\n")
f.write("---\n\n")
f.write(message + "\n")
def _adjust_mutual_trust(self, left, right, delta):
for speaker, listener in ((left, right), (right, left)):
trust_map = self.world.characters[speaker]["trust"]
trust_map[listener] = max(-1.0, min(1.0, trust_map.get(listener, 0.0) + delta))
def _record_relationship_milestone(self, scene, room_name, pair, bucket, milestone, hint):
pair_list = list(pair)
entries = self.world.state.setdefault(bucket, [])
if any(entry.get("pair") == pair_list for entry in entries):
return
entries.append({
"pair": pair_list,
"room": room_name,
"summary": milestone,
"hint": hint,
})
scene["world_events"].append(milestone)
def _run_offscreen_npc_relationships(self, scene):
    """Simulate seeded NPC pair conversations in rooms the player is not in.

    For each seeded pair sharing a room away from Timmy: adjust mutual
    trust by the seed delta, log the exchange to the scene and to both
    participants' spoken/memory lists, and record a one-time friendship
    or tension milestone once both trust values cross the threshold.
    """
    player_room = self.world.characters["Timmy"]["room"]
    by_room = {}
    for name, data in self.world.characters.items():
        if not data.get("is_player", False):
            by_room.setdefault(data["room"], []).append(name)
    for location, present in by_room.items():
        # Off-screen only: skip Timmy's room, and rooms with a lone NPC.
        if location == player_room or len(present) < 2:
            continue
        present_set = set(present)
        for pair, seed in NPC_RELATIONSHIP_SEEDS.items():
            if not present_set.issuperset(pair):
                continue
            left, right = pair
            self._adjust_mutual_trust(left, right, seed["delta"])
            line = seed["conversation"]
            scene["npc_actions"].append(f"{left} and {right} speak in The {location} while you are away.")
            scene["world_events"].append(line)
            for participant in pair:
                sheet = self.world.characters[participant]
                sheet["spoken"].append(line)
                sheet["memories"].append(line)
            # Read trust AFTER the adjustment so the milestone fires on the
            # tick that crosses the threshold.
            left_trust = self.world.characters[left]["trust"][right]
            right_trust = self.world.characters[right]["trust"][left]
            if seed["kind"] == "friendship" and min(left_trust, right_trust) >= FRIENDSHIP_THRESHOLD:
                self._record_relationship_milestone(
                    scene, location, pair, "npc_friendships", seed["milestone"], seed["hint"],
                )
            elif seed["kind"] == "tension" and max(left_trust, right_trust) <= TENSION_THRESHOLD:
                self._record_relationship_milestone(
                    scene, location, pair, "npc_tensions", seed["milestone"], seed["hint"],
                )
def run_tick(self, timmy_action="look"):
"""Run one tick. Return the scene and available choices."""
@@ -1397,6 +1517,8 @@ class GameEngine:
self.world.characters[char_name]["room"] = dest
self.world.characters[char_name]["energy"] -= 1
scene["npc_actions"].append(f"{char_name} moves from The {old_room} to The {dest}")
self._run_offscreen_npc_relationships(scene)
# Random NPC events — phase-aware speech
room_name = self.world.characters["Timmy"]["room"]

View File

@@ -62,24 +62,6 @@ Writes:
## Usage
### Timmy Mac wiring helper
Use the dedicated Timmy helper when you want to wire a real RunPod or Vertex-style endpoint into the local Mac Hermes config:
```bash
python3 scripts/timmy_gemma4_mac.py --base-url https://your-openai-bridge.example/v1 --write-config
python3 scripts/timmy_gemma4_mac.py --vertex-base-url https://your-vertex-bridge.example --write-config
python3 scripts/timmy_gemma4_mac.py --pod-id <runpod-id> --write-config --verify-chat
```
The helper writes to `~/.hermes/config.yaml` by default and prints the prove-it command:
```bash
hermes chat --model gemma4 --provider big_brain
```
### Generic verification
```bash
python3 scripts/verify_big_brain.py
python3 scripts/big_brain_manager.py
```

View File

@@ -1,164 +0,0 @@
#!/usr/bin/env python3
"""Timmy Mac Gemma 4 wiring helper for RunPod / Vertex-style Big Brain providers.
Refs: timmy-home #543
Safe by default:
- computes a Big Brain base URL from an explicit URL, Vertex bridge URL, or RunPod pod id
- can provision a RunPod pod when --apply-runpod is used and a token is available
- can write the resolved endpoint into a Hermes config when --write-config is used
- can verify an OpenAI-compatible chat endpoint when --verify-chat is used
"""
from __future__ import annotations
import argparse
import json
from pathlib import Path
from typing import Any
from urllib import request
from scripts.bezalel_gemma4_vps import (
DEFAULT_CLOUD_TYPE,
DEFAULT_GPU_TYPE,
DEFAULT_MODEL,
DEFAULT_PROVIDER_NAME,
build_runpod_endpoint,
deploy_runpod,
update_config_text,
)
# RunPod access key read when --apply-runpod provisions a pod.
DEFAULT_TOKEN_FILE = Path.home() / ".config" / "runpod" / "access_key"
# Hermes config file updated by --write-config.
DEFAULT_CONFIG_PATH = Path.home() / ".hermes" / "config.yaml"
def _normalize_openai_base(base_url: str | None) -> str:
if not base_url:
return ""
cleaned = str(base_url).strip().rstrip("/")
return cleaned if cleaned.endswith("/v1") else f"{cleaned}/v1"
def choose_base_url(*, vertex_base_url: str | None = None, base_url: str | None = None, pod_id: str | None = None) -> str:
    """Resolve the Big Brain base URL.

    Preference order: Vertex bridge URL, then an explicit base URL, then
    a RunPod pod id; otherwise return a placeholder host for the user to
    fill in.
    """
    for candidate in (vertex_base_url, base_url):
        if candidate:
            return _normalize_openai_base(candidate)
    if pod_id:
        return build_runpod_endpoint(pod_id)
    return "https://YOUR_BIG_BRAIN_HOST/v1"
def write_config_file(config_path: Path, *, base_url: str, model: str = DEFAULT_MODEL, provider_name: str = DEFAULT_PROVIDER_NAME) -> str:
    """Merge the resolved endpoint into the Hermes config at *config_path*.

    Reads the existing file when present, rewrites it via
    update_config_text, creates parent directories as needed, and returns
    the updated config text.
    """
    existing = config_path.read_text() if config_path.exists() else ""
    merged = update_config_text(existing, base_url=base_url, model=model, provider_name=provider_name)
    config_path.parent.mkdir(parents=True, exist_ok=True)
    config_path.write_text(merged)
    return merged
def verify_openai_chat(base_url: str, *, model: str = DEFAULT_MODEL, prompt: str = "Say READY") -> str:
    """POST a minimal chat completion to *base_url* and return the reply text.

    Sends a non-streaming request capped at 16 tokens to the
    OpenAI-compatible /chat/completions endpoint; raises on HTTP errors.
    """
    body = {
        "model": model,
        "messages": [{"role": "user", "content": prompt}],
        "stream": False,
        "max_tokens": 16,
    }
    probe = request.Request(
        f"{base_url.rstrip('/')}/chat/completions",
        data=json.dumps(body).encode(),
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    with request.urlopen(probe, timeout=30) as resp:
        parsed = json.loads(resp.read().decode())
    return parsed["choices"][0]["message"]["content"]
def build_summary(*, base_url: str, model: str, provider_name: str = DEFAULT_PROVIDER_NAME, config_path: Path = DEFAULT_CONFIG_PATH) -> dict[str, Any]:
    """Describe the resolved wiring plus the prove-it commands to run next."""
    commands = [
        "python3 scripts/verify_big_brain.py",
        f"python3 scripts/timmy_gemma4_mac.py --base-url {base_url} --write-config --verify-chat",
        "hermes chat --model gemma4 --provider big_brain",
    ]
    return {
        "provider_name": provider_name,
        "base_url": base_url,
        "model": model,
        "config_path": str(config_path),
        "verification_commands": commands,
    }
def parse_args() -> argparse.Namespace:
    """Parse CLI options for wiring a Gemma 4 endpoint into Hermes."""
    arg_parser = argparse.ArgumentParser(description="Wire a RunPod/Vertex Gemma 4 endpoint into Timmy's Mac Hermes config.")
    # Pod provisioning knobs.
    arg_parser.add_argument("--pod-name", default="timmy-gemma4")
    arg_parser.add_argument("--gpu-type", default=DEFAULT_GPU_TYPE)
    arg_parser.add_argument("--cloud-type", default=DEFAULT_CLOUD_TYPE)
    arg_parser.add_argument("--model", default=DEFAULT_MODEL)
    arg_parser.add_argument("--provider-name", default=DEFAULT_PROVIDER_NAME)
    # Local file locations.
    arg_parser.add_argument("--token-file", type=Path, default=DEFAULT_TOKEN_FILE)
    arg_parser.add_argument("--config-path", type=Path, default=DEFAULT_CONFIG_PATH)
    # Endpoint selection (vertex > explicit URL > pod id).
    arg_parser.add_argument("--pod-id", help="Existing RunPod pod id to convert into an OpenAI-compatible base URL")
    arg_parser.add_argument("--base-url", help="Explicit OpenAI-compatible base URL")
    arg_parser.add_argument("--vertex-base-url", help="Vertex AI OpenAI-compatible bridge base URL")
    # Opt-in actions; everything is dry-run by default.
    arg_parser.add_argument("--apply-runpod", action="store_true", help="Provision a RunPod pod using the RunPod GraphQL API")
    arg_parser.add_argument("--write-config", action="store_true", help="Write the resolved endpoint into --config-path")
    arg_parser.add_argument("--verify-chat", action="store_true", help="Run a lightweight OpenAI-compatible chat probe")
    arg_parser.add_argument("--json", action="store_true", help="Emit machine-readable JSON")
    return arg_parser.parse_args()
def main() -> None:
    """CLI entry point.

    Resolves a Big Brain base URL, optionally provisions a RunPod pod,
    writes the Hermes config, verifies the chat endpoint, and prints a
    summary (JSON with --json, human-readable otherwise).
    """
    args = parse_args()
    # Summary begins with the requested knobs; "actions" records what was done.
    summary: dict[str, Any] = {
        "pod_name": args.pod_name,
        "gpu_type": args.gpu_type,
        "cloud_type": args.cloud_type,
        "model": args.model,
        "provider_name": args.provider_name,
        "actions": [],
    }
    # Preference order: vertex bridge > explicit URL > pod id (placeholder otherwise).
    base_url = choose_base_url(vertex_base_url=args.vertex_base_url, base_url=args.base_url, pod_id=args.pod_id)
    if args.apply_runpod:
        # Provisioning requires a RunPod API key on disk; fail fast if absent.
        if not args.token_file.exists():
            raise SystemExit(f"RunPod token file not found: {args.token_file}")
        api_key = args.token_file.read_text().strip()
        deployed = deploy_runpod(api_key=api_key, name=args.pod_name, gpu_type=args.gpu_type, cloud_type=args.cloud_type, model=args.model)
        summary["deployment"] = deployed
        # A freshly deployed pod overrides any previously resolved base URL.
        base_url = deployed["base_url"]
        summary["actions"].append("deployed_runpod_pod")
    summary.update(build_summary(base_url=base_url, model=args.model, provider_name=args.provider_name, config_path=args.config_path))
    if args.write_config:
        write_config_file(args.config_path, base_url=base_url, model=args.model, provider_name=args.provider_name)
        summary["actions"].append("wrote_config")
    if args.verify_chat:
        summary["verify_response"] = verify_openai_chat(base_url, model=args.model)
        summary["actions"].append("verified_chat")
    if args.json:
        # Machine-readable mode: emit JSON only and stop.
        print(json.dumps(summary, indent=2))
        return
    # Human-readable report.
    print("--- Timmy Gemma4 Mac Wiring ---")
    print(f"Provider: {args.provider_name}")
    print(f"Base URL: {base_url}")
    print(f"Model: {args.model}")
    print(f"Config path: {args.config_path}")
    if "verify_response" in summary:
        print(f"Verify response: {summary['verify_response']}")
    if summary["actions"]:
        print("Actions: " + ", ".join(summary["actions"]))
    print("Verification commands:")
    for command in summary["verification_commands"]:
        print(f" - {command}")
# Script entry point.
if __name__ == "__main__":
    main()

View File

@@ -1,85 +0,0 @@
from __future__ import annotations
import importlib.util
import json
import sys
from pathlib import Path
from unittest.mock import patch
# Repository root, resolved relative to this test file.
ROOT = Path(__file__).resolve().parent.parent
# Script and README under test.
SCRIPT = ROOT / "scripts" / "timmy_gemma4_mac.py"
README = ROOT / "scripts" / "README_big_brain.md"
def load_module():
    """Import scripts/timmy_gemma4_mac.py under a stable module name so
    patch("timmy_gemma4_mac....") targets resolve."""
    spec = importlib.util.spec_from_file_location("timmy_gemma4_mac", str(SCRIPT))
    module = importlib.util.module_from_spec(spec)
    sys.modules["timmy_gemma4_mac"] = module
    spec.loader.exec_module(module)
    return module
class _FakeResponse:
def __init__(self, payload: dict):
self._payload = json.dumps(payload).encode()
def read(self) -> bytes:
return self._payload
def __enter__(self):
return self
def __exit__(self, exc_type, exc, tb):
return False
def test_script_exists() -> None:
    """The wiring helper script must be checked in."""
    message = "scripts/timmy_gemma4_mac.py must exist"
    assert SCRIPT.exists(), message
def test_default_paths_target_timmy_mac_hermes() -> None:
    """Defaults must point at the Mac Hermes config and the RunPod key."""
    mod = load_module()
    home = Path.home()
    assert mod.DEFAULT_CONFIG_PATH == home / ".hermes" / "config.yaml"
    assert mod.DEFAULT_TOKEN_FILE == home / ".config" / "runpod" / "access_key"
def test_choose_base_url_prefers_vertex_then_explicit_then_runpod() -> None:
    """Resolution order: vertex bridge, explicit URL, then RunPod pod id."""
    mod = load_module()
    cases = [
        ({"vertex_base_url": "https://vertex-proxy.example/v1"}, "https://vertex-proxy.example/v1"),
        ({"base_url": "https://custom-endpoint/v1"}, "https://custom-endpoint/v1"),
        ({"pod_id": "abc123"}, "https://abc123-11434.proxy.runpod.net/v1"),
    ]
    for kwargs, expected in cases:
        assert mod.choose_base_url(**kwargs) == expected
def test_build_summary_includes_prove_it_commands() -> None:
    """The summary must lead with the verifier script and include the
    hermes chat probe."""
    mod = load_module()
    summary = mod.build_summary(base_url="https://vertex-proxy.example/v1", model="gemma4:latest")
    commands = summary["verification_commands"]
    assert commands[0] == "python3 scripts/verify_big_brain.py"
    assert any("hermes chat --model gemma4 --provider big_brain" in command for command in commands)
def test_verify_openai_chat_targets_chat_completions() -> None:
    """verify_openai_chat must POST to <base>/chat/completions and surface
    the model's reply content."""
    mod = load_module()
    canned = {
        "choices": [{"message": {"content": "READY"}}]
    }
    with patch("timmy_gemma4_mac.request.urlopen", return_value=_FakeResponse(canned)) as mocked:
        reply = mod.verify_openai_chat("https://vertex-proxy.example/v1", model="gemma4:latest", prompt="say READY")
    assert reply == "READY"
    sent = mocked.call_args.args[0]
    assert sent.full_url == "https://vertex-proxy.example/v1/chat/completions"
def test_readme_mentions_timmy_mac_wiring_flow() -> None:
    """The README must document the Mac wiring flow and prove-it commands."""
    text = README.read_text(encoding="utf-8")
    expected_snippets = (
        "scripts/timmy_gemma4_mac.py",
        "--vertex-base-url",
        "--write-config",
        "python3 scripts/verify_big_brain.py",
        "hermes chat --model gemma4 --provider big_brain",
    )
    missing = [snippet for snippet in expected_snippets if snippet not in text]
    assert not missing, missing

View File

@@ -0,0 +1,52 @@
from importlib.util import module_from_spec, spec_from_file_location
from pathlib import Path
import unittest
# Repository root, resolved relative to this test file.
ROOT = Path(__file__).resolve().parent.parent
# Tower world game engine under test.
GAME_PATH = ROOT / "evennia" / "timmy_world" / "game.py"
def load_game_module():
    """Load the tower game module straight from its file and seed its RNG."""
    spec = spec_from_file_location("tower_game_relationships", GAME_PATH)
    game_module = module_from_spec(spec)
    assert spec.loader is not None
    spec.loader.exec_module(game_module)
    # Fixed seed keeps random NPC events reproducible across runs.
    game_module.random.seed(0)
    return game_module
class TestTowerGameNpcRelationships(unittest.TestCase):
    """NPC-to-NPC relationship behavior in the tower world (#515)."""

    def test_each_npc_tracks_trust_for_every_other_npc(self):
        """Every NPC's trust map must cover all other NPCs."""
        module = load_game_module()
        world = module.World()
        npc_names = [
            name
            for name, char in world.characters.items()
            if not char.get("is_player", False)
        ]
        for npc_name in npc_names:
            with self.subTest(npc=npc_name):
                trust_map = world.characters[npc_name]["trust"]
                expected = set(npc_names) - {npc_name}
                self.assertTrue(
                    expected.issubset(set(trust_map)),
                    f"{npc_name} missing NPC trust keys: {sorted(expected - set(trust_map))}",
                )

    def test_offscreen_npc_conversations_create_friendship_and_tension(self):
        """One tick must seed the Kimi/Marcus friendship and the
        Bezalel/ClawCode tension, and surface both in room descriptions."""
        module = load_game_module()
        engine = module.GameEngine()
        engine.start_new_game()
        result = engine.run_tick("look")
        state = engine.world.state
        friendships = {tuple(rel["pair"]) for rel in state["npc_friendships"]}
        tensions = {tuple(rel["pair"]) for rel in state["npc_tensions"]}
        self.assertIn(("Kimi", "Marcus"), friendships)
        self.assertIn(("Bezalel", "ClawCode"), tensions)
        offscreen_lines = [
            line for line in result["world_events"] if "while you are away" in line.lower()
        ]
        self.assertTrue(offscreen_lines)
        garden_desc = engine.world.get_room_desc("Garden", "Timmy")
        forge_desc = engine.world.get_room_desc("Forge", "Timmy")
        self.assertIn("Marcus and Kimi", garden_desc)
        self.assertIn("Bezalel and ClawCode", forge_desc)
# Allow running this test file directly with python3 as well as via a runner.
if __name__ == "__main__":
    unittest.main()