Compare commits
2 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
24985a29db | ||
|
|
d6c90df391 |
@@ -1059,46 +1059,6 @@ class GameEngine:
|
||||
self.log("It will always pulse. That much you know.")
|
||||
self.log("")
|
||||
self.world.save()
|
||||
|
||||
def _bridge_is_hazardous(self):
|
||||
bridge = self.world.rooms["Bridge"]
|
||||
return bool(
|
||||
self.world.state.get("bridge_flooding")
|
||||
or bridge.get("weather") == "rain"
|
||||
or bridge.get("rain_ticks", 0) > 0
|
||||
)
|
||||
|
||||
def _bridge_crossing_extra_cost(self, current_room, dest):
|
||||
if "Bridge" not in (current_room, dest):
|
||||
return 0
|
||||
return 2 if self._bridge_is_hazardous() else 0
|
||||
|
||||
def _event_dialogue(self, char_name, room_name):
|
||||
if char_name == "Bezalel" and room_name == "Forge":
|
||||
if self.world.rooms["Forge"]["fire"] == "cold":
|
||||
return random.choice([
|
||||
"The forge is cold. We cannot work until the fire lives again.",
|
||||
"No forging now. The hearth is dead cold.",
|
||||
])
|
||||
if self.world.state.get("forge_fire_dying"):
|
||||
return random.choice([
|
||||
"The fire is dying. Tend it before the forge goes dark.",
|
||||
"The forge is losing heat. Help me keep it alive.",
|
||||
])
|
||||
|
||||
if char_name == "Ezra" and room_name == "Tower" and self.world.state.get("tower_power_low"):
|
||||
return random.choice([
|
||||
"The Tower power is too low. The servers won't hold a clean study right now.",
|
||||
"The LED is flickering. We need steady power before the Tower can be read properly.",
|
||||
])
|
||||
|
||||
if char_name in {"Marcus", "Allegro"} and room_name == "Bridge" and self._bridge_is_hazardous():
|
||||
return random.choice([
|
||||
"The Bridge is slick with rain. Cross carefully or wait it out.",
|
||||
"This rain changes the Bridge. Don't treat it like dry stone.",
|
||||
])
|
||||
|
||||
return None
|
||||
|
||||
def log(self, message):
|
||||
"""Add to Timmy's log."""
|
||||
@@ -1134,7 +1094,6 @@ class GameEngine:
|
||||
}
|
||||
|
||||
# Process Timmy's action
|
||||
room_name = self.world.characters["Timmy"]["room"]
|
||||
timmy_energy = self.world.characters["Timmy"]["energy"]
|
||||
|
||||
# Energy constraint checks
|
||||
@@ -1197,17 +1156,8 @@ class GameEngine:
|
||||
|
||||
if direction in connections:
|
||||
dest = connections[direction]
|
||||
bridge_extra_cost = self._bridge_crossing_extra_cost(current_room, dest)
|
||||
move_cost = 1 + bridge_extra_cost
|
||||
if self.world.characters["Timmy"]["energy"] < move_cost:
|
||||
scene["log"].append("The rain makes the Bridge too costly to cross right now. Rest first.")
|
||||
scene["room_desc"] = self.world.get_room_desc(current_room, "Timmy")
|
||||
here = [n for n in self.world.characters if self.world.characters[n]["room"] == current_room and n != "Timmy"]
|
||||
scene["here"] = here
|
||||
return scene
|
||||
|
||||
self.world.characters["Timmy"]["room"] = dest
|
||||
self.world.characters["Timmy"]["energy"] -= move_cost
|
||||
self.world.characters["Timmy"]["energy"] -= 1
|
||||
|
||||
scene["log"].append(f"You move {direction} to The {dest}.")
|
||||
scene["timmy_room"] = dest
|
||||
@@ -1215,8 +1165,6 @@ class GameEngine:
|
||||
# Check for rain on bridge
|
||||
if dest == "Bridge" and self.world.rooms["Bridge"]["weather"] == "rain":
|
||||
scene["world_events"].append("Rain mists on the dark water below. The railing is slick.")
|
||||
if bridge_extra_cost:
|
||||
scene["log"].append("Rain turns the Bridge crossing into work. You brace against the slick stone. (-2 extra energy)")
|
||||
|
||||
# Check trust changes for arrival
|
||||
here = [n for n in self.world.characters if self.world.characters[n]["room"] == dest and n != "Timmy"]
|
||||
@@ -1362,69 +1310,25 @@ class GameEngine:
|
||||
|
||||
elif timmy_action == "write_rule":
|
||||
if self.world.characters["Timmy"]["room"] == "Tower":
|
||||
if self.world.state.get("tower_power_low"):
|
||||
scene["world_events"].append("The Tower power is too low. The LED flickers over the whiteboard.")
|
||||
scene["log"].append("The power is too low to write a new rule.")
|
||||
else:
|
||||
rules = [
|
||||
f"Rule #{self.world.tick}: The room remembers those who enter it.",
|
||||
f"Rule #{self.world.tick}: A man in the dark needs to know someone is in the room.",
|
||||
f"Rule #{self.world.tick}: The forge does not care about your schedule.",
|
||||
f"Rule #{self.world.tick}: Every footprint on the stone means someone made it here.",
|
||||
f"Rule #{self.world.tick}: The bridge does not judge. It only carries.",
|
||||
f"Rule #{self.world.tick}: A seed planted in patience grows in time.",
|
||||
f"Rule #{self.world.tick}: What is carved in wood outlasts what is said in anger.",
|
||||
f"Rule #{self.world.tick}: The garden grows whether anyone watches or not.",
|
||||
f"Rule #{self.world.tick}: Trust is built one tick at a time.",
|
||||
f"Rule #{self.world.tick}: The fire remembers who tended it.",
|
||||
]
|
||||
new_rule = random.choice(rules)
|
||||
self.world.rooms["Tower"]["messages"].append(new_rule)
|
||||
self.world.characters["Timmy"]["energy"] -= 1
|
||||
scene["log"].append(f"You write on the Tower whiteboard: \"{new_rule}\"")
|
||||
rules = [
|
||||
f"Rule #{self.world.tick}: The room remembers those who enter it.",
|
||||
f"Rule #{self.world.tick}: A man in the dark needs to know someone is in the room.",
|
||||
f"Rule #{self.world.tick}: The forge does not care about your schedule.",
|
||||
f"Rule #{self.world.tick}: Every footprint on the stone means someone made it here.",
|
||||
f"Rule #{self.world.tick}: The bridge does not judge. It only carries.",
|
||||
f"Rule #{self.world.tick}: A seed planted in patience grows in time.",
|
||||
f"Rule #{self.world.tick}: What is carved in wood outlasts what is said in anger.",
|
||||
f"Rule #{self.world.tick}: The garden grows whether anyone watches or not.",
|
||||
f"Rule #{self.world.tick}: Trust is built one tick at a time.",
|
||||
f"Rule #{self.world.tick}: The fire remembers who tended it.",
|
||||
]
|
||||
new_rule = random.choice(rules)
|
||||
self.world.rooms["Tower"]["messages"].append(new_rule)
|
||||
self.world.characters["Timmy"]["energy"] -= 1
|
||||
scene["log"].append(f"You write on the Tower whiteboard: \"{new_rule}\"")
|
||||
else:
|
||||
scene["log"].append("You are not in the Tower.")
|
||||
|
||||
elif timmy_action == "study":
|
||||
if self.world.characters["Timmy"]["room"] == "Tower":
|
||||
if self.world.state.get("tower_power_low"):
|
||||
scene["world_events"].append("The Tower power is too low. The servers stutter in weak light.")
|
||||
scene["log"].append("The power is too low to study the servers.")
|
||||
else:
|
||||
insights = [
|
||||
"You study the server rhythm until the pulse resolves into something readable.",
|
||||
"You trace the signal paths and feel the Tower settle into focus.",
|
||||
"You study the green LED and the server racks until the pattern becomes clear.",
|
||||
]
|
||||
insight = random.choice(insights)
|
||||
self.world.characters["Timmy"]["energy"] -= 1
|
||||
self.world.characters["Timmy"]["memories"].append(insight)
|
||||
scene["log"].append(insight)
|
||||
scene["world_events"].append("The Tower answers with a steady hum.")
|
||||
else:
|
||||
scene["log"].append("You are not in the Tower.")
|
||||
|
||||
elif timmy_action == "forge":
|
||||
if self.world.characters["Timmy"]["room"] == "Forge":
|
||||
forge_fire = self.world.rooms["Forge"]["fire"]
|
||||
if forge_fire == "cold":
|
||||
scene["world_events"].append("The forge is cold. No metal will take shape here yet.")
|
||||
scene["log"].append("The forge is cold. Tend the fire before you try to forge.")
|
||||
else:
|
||||
forged_items = [
|
||||
f"bridge nail #{self.world.tick}",
|
||||
f"tower key blank #{self.world.tick}",
|
||||
f"garden trowel #{self.world.tick}",
|
||||
]
|
||||
forged_item = random.choice(forged_items)
|
||||
self.world.rooms["Forge"]["forged_items"].append(forged_item)
|
||||
self.world.characters["Timmy"]["energy"] -= 2
|
||||
self.world.state["items_crafted"] += 1
|
||||
scene["log"].append(f"You forge {forged_item} at the anvil.")
|
||||
scene["world_events"].append("The anvil rings and the hearth answers.")
|
||||
else:
|
||||
scene["log"].append("You are not in the Forge.")
|
||||
|
||||
elif timmy_action == "carve":
|
||||
if self.world.characters["Timmy"]["room"] == "Bridge":
|
||||
carvings = [
|
||||
@@ -1510,11 +1414,7 @@ class GameEngine:
|
||||
speech_chance = 0.20
|
||||
|
||||
if random.random() < speech_chance:
|
||||
event_line = self._event_dialogue(char_name, room_name)
|
||||
if event_line:
|
||||
self.world.characters[char_name]["spoken"].append(event_line)
|
||||
scene["log"].append(f"{char_name} says: \"{event_line}\"")
|
||||
elif char_name == "Marcus":
|
||||
if char_name == "Marcus":
|
||||
marcus_pool = self.DIALOGUES["Marcus"].get(phase, self.DIALOGUES["Marcus"]["quietus"])
|
||||
line = random.choice(marcus_pool)
|
||||
self.world.characters[char_name]["spoken"].append(line)
|
||||
|
||||
@@ -62,6 +62,24 @@ Writes:
|
||||
|
||||
## Usage
|
||||
|
||||
### Timmy Mac wiring helper
|
||||
|
||||
Use the dedicated Timmy helper when you want to wire a real RunPod or Vertex-style endpoint into the local Mac Hermes config:
|
||||
|
||||
```bash
|
||||
python3 scripts/timmy_gemma4_mac.py --base-url https://your-openai-bridge.example/v1 --write-config
|
||||
python3 scripts/timmy_gemma4_mac.py --vertex-base-url https://your-vertex-bridge.example --write-config
|
||||
python3 scripts/timmy_gemma4_mac.py --pod-id <runpod-id> --write-config --verify-chat
|
||||
```
|
||||
|
||||
The helper writes to `~/.hermes/config.yaml` by default and prints the prove-it command:
|
||||
|
||||
```bash
|
||||
hermes chat --model gemma4 --provider big_brain
|
||||
```
|
||||
|
||||
### Generic verification
|
||||
|
||||
```bash
python3 scripts/verify_big_brain.py
python3 scripts/big_brain_manager.py
```
|
||||
|
||||
164
scripts/timmy_gemma4_mac.py
Normal file
164
scripts/timmy_gemma4_mac.py
Normal file
@@ -0,0 +1,164 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Timmy Mac Gemma 4 wiring helper for RunPod / Vertex-style Big Brain providers.
|
||||
|
||||
Refs: timmy-home #543
|
||||
|
||||
Safe by default:
|
||||
- computes a Big Brain base URL from an explicit URL, Vertex bridge URL, or RunPod pod id
|
||||
- can provision a RunPod pod when --apply-runpod is used and a token is available
|
||||
- can write the resolved endpoint into a Hermes config when --write-config is used
|
||||
- can verify an OpenAI-compatible chat endpoint when --verify-chat is used
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
from urllib import request
|
||||
|
||||
from scripts.bezalel_gemma4_vps import (
|
||||
DEFAULT_CLOUD_TYPE,
|
||||
DEFAULT_GPU_TYPE,
|
||||
DEFAULT_MODEL,
|
||||
DEFAULT_PROVIDER_NAME,
|
||||
build_runpod_endpoint,
|
||||
deploy_runpod,
|
||||
update_config_text,
|
||||
)
|
||||
|
||||
# Where the RunPod API token is read from when --apply-runpod is used.
DEFAULT_TOKEN_FILE = Path.home() / ".config" / "runpod" / "access_key"
# Hermes config file that --write-config updates on Timmy's Mac.
DEFAULT_CONFIG_PATH = Path.home() / ".hermes" / "config.yaml"
|
||||
|
||||
|
||||
def _normalize_openai_base(base_url: str | None) -> str:
|
||||
if not base_url:
|
||||
return ""
|
||||
cleaned = str(base_url).strip().rstrip("/")
|
||||
return cleaned if cleaned.endswith("/v1") else f"{cleaned}/v1"
|
||||
|
||||
|
||||
def choose_base_url(*, vertex_base_url: str | None = None, base_url: str | None = None, pod_id: str | None = None) -> str:
    """Resolve the Big Brain base URL from the available sources.

    Priority: Vertex bridge URL, then explicit base URL, then a RunPod
    pod id converted via build_runpod_endpoint(). Falls back to a
    placeholder host when nothing is provided.
    """
    for candidate in (vertex_base_url, base_url):
        if candidate:
            return _normalize_openai_base(candidate)
    if pod_id:
        return build_runpod_endpoint(pod_id)
    return "https://YOUR_BIG_BRAIN_HOST/v1"
|
||||
|
||||
|
||||
def write_config_file(config_path: Path, *, base_url: str, model: str = DEFAULT_MODEL, provider_name: str = DEFAULT_PROVIDER_NAME) -> str:
    """Merge the endpoint settings into the Hermes config on disk.

    Reads the existing config (if any), rewrites it via
    update_config_text(), creates parent directories as needed, writes
    the result back, and returns the new config text.
    """
    existing = config_path.read_text() if config_path.exists() else ""
    merged = update_config_text(existing, base_url=base_url, model=model, provider_name=provider_name)
    config_path.parent.mkdir(parents=True, exist_ok=True)
    config_path.write_text(merged)
    return merged
|
||||
|
||||
|
||||
def verify_openai_chat(base_url: str, *, model: str = DEFAULT_MODEL, prompt: str = "Say READY") -> str:
    """POST a minimal chat request and return the first choice's content.

    Sends a non-streaming, 16-token OpenAI-compatible
    ``/chat/completions`` probe to *base_url* with a 30s timeout.
    """
    body = {
        "model": model,
        "messages": [{"role": "user", "content": prompt}],
        "stream": False,
        "max_tokens": 16,
    }
    url = f"{base_url.rstrip('/')}/chat/completions"
    req = request.Request(
        url,
        data=json.dumps(body).encode(),
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    with request.urlopen(req, timeout=30) as resp:
        parsed = json.loads(resp.read().decode())
    return parsed["choices"][0]["message"]["content"]
|
||||
|
||||
|
||||
def build_summary(*, base_url: str, model: str, provider_name: str = DEFAULT_PROVIDER_NAME, config_path: Path = DEFAULT_CONFIG_PATH) -> dict[str, Any]:
    """Assemble the wiring summary, including prove-it verification commands."""
    # Commands a user can copy-paste to confirm the wiring end to end.
    commands = [
        "python3 scripts/verify_big_brain.py",
        f"python3 scripts/timmy_gemma4_mac.py --base-url {base_url} --write-config --verify-chat",
        "hermes chat --model gemma4 --provider big_brain",
    ]
    return {
        "provider_name": provider_name,
        "base_url": base_url,
        "model": model,
        "config_path": str(config_path),
        "verification_commands": commands,
    }
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
    """Parse CLI flags for the Timmy Mac Gemma 4 wiring helper."""
    p = argparse.ArgumentParser(description="Wire a RunPod/Vertex Gemma 4 endpoint into Timmy's Mac Hermes config.")
    # Pod / provider identity.
    p.add_argument("--pod-name", default="timmy-gemma4")
    p.add_argument("--gpu-type", default=DEFAULT_GPU_TYPE)
    p.add_argument("--cloud-type", default=DEFAULT_CLOUD_TYPE)
    p.add_argument("--model", default=DEFAULT_MODEL)
    p.add_argument("--provider-name", default=DEFAULT_PROVIDER_NAME)
    # Filesystem locations.
    p.add_argument("--token-file", type=Path, default=DEFAULT_TOKEN_FILE)
    p.add_argument("--config-path", type=Path, default=DEFAULT_CONFIG_PATH)
    # Endpoint sources, in the priority order handled by choose_base_url().
    p.add_argument("--pod-id", help="Existing RunPod pod id to convert into an OpenAI-compatible base URL")
    p.add_argument("--base-url", help="Explicit OpenAI-compatible base URL")
    p.add_argument("--vertex-base-url", help="Vertex AI OpenAI-compatible bridge base URL")
    # Actions — safe by default: nothing happens unless explicitly requested.
    p.add_argument("--apply-runpod", action="store_true", help="Provision a RunPod pod using the RunPod GraphQL API")
    p.add_argument("--write-config", action="store_true", help="Write the resolved endpoint into --config-path")
    p.add_argument("--verify-chat", action="store_true", help="Run a lightweight OpenAI-compatible chat probe")
    p.add_argument("--json", action="store_true", help="Emit machine-readable JSON")
    return p.parse_args()
|
||||
|
||||
|
||||
def main() -> None:
    """Resolve, optionally provision, write, and verify a Big Brain endpoint.

    Order of operations: parse flags, resolve the base URL, optionally
    deploy a RunPod pod (--apply-runpod), build the summary, optionally
    write the Hermes config (--write-config) and run a chat probe
    (--verify-chat), then print the summary as JSON or human-readable text.
    """
    args = parse_args()
    # Seed the summary with the requested configuration; "actions" records
    # every side effect actually performed.
    summary: dict[str, Any] = {
        "pod_name": args.pod_name,
        "gpu_type": args.gpu_type,
        "cloud_type": args.cloud_type,
        "model": args.model,
        "provider_name": args.provider_name,
        "actions": [],
    }

    base_url = choose_base_url(vertex_base_url=args.vertex_base_url, base_url=args.base_url, pod_id=args.pod_id)

    if args.apply_runpod:
        # Provisioning requires a token on disk; fail fast with a clear path.
        if not args.token_file.exists():
            raise SystemExit(f"RunPod token file not found: {args.token_file}")
        api_key = args.token_file.read_text().strip()
        deployed = deploy_runpod(api_key=api_key, name=args.pod_name, gpu_type=args.gpu_type, cloud_type=args.cloud_type, model=args.model)
        summary["deployment"] = deployed
        # A freshly deployed pod overrides any previously resolved URL.
        base_url = deployed["base_url"]
        summary["actions"].append("deployed_runpod_pod")

    summary.update(build_summary(base_url=base_url, model=args.model, provider_name=args.provider_name, config_path=args.config_path))

    if args.write_config:
        write_config_file(args.config_path, base_url=base_url, model=args.model, provider_name=args.provider_name)
        summary["actions"].append("wrote_config")

    if args.verify_chat:
        summary["verify_response"] = verify_openai_chat(base_url, model=args.model)
        summary["actions"].append("verified_chat")

    if args.json:
        # Machine-readable output; skip the human-readable report below.
        print(json.dumps(summary, indent=2))
        return

    print("--- Timmy Gemma4 Mac Wiring ---")
    print(f"Provider: {args.provider_name}")
    print(f"Base URL: {base_url}")
    print(f"Model: {args.model}")
    print(f"Config path: {args.config_path}")
    if "verify_response" in summary:
        print(f"Verify response: {summary['verify_response']}")
    if summary["actions"]:
        print("Actions: " + ", ".join(summary["actions"]))
    print("Verification commands:")
    for command in summary["verification_commands"]:
        print(f"  - {command}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -1,7 +1,6 @@
|
||||
from importlib.util import module_from_spec, spec_from_file_location
|
||||
from pathlib import Path
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parent.parent
|
||||
@@ -67,82 +66,6 @@ class TestEvenniaLocalWorldGame(unittest.TestCase):
|
||||
self.assertIn("Ezra is already here.", result["log"])
|
||||
self.assertIn("The servers hum steady. The green LED pulses.", result["world_events"])
|
||||
|
||||
def test_bridge_rain_crossing_costs_extra_energy_and_warns(self):
    """A rainy/flooded Bridge crossing costs more energy than a dry one and warns the player."""
    module = load_game_module()

    # Baseline: dry crossing from the starting room to the Bridge.
    # update_world_state is stubbed out so weather cannot change mid-test.
    dry_engine = module.GameEngine()
    dry_engine.start_new_game()
    dry_engine.world.update_world_state = lambda: None
    dry_engine.world.characters["Timmy"]["energy"] = 10
    dry_result = dry_engine.run_tick("move:south")
    dry_energy = dry_engine.world.characters["Timmy"]["energy"]

    # Same crossing but with every bridge hazard flag set.
    rainy_engine = module.GameEngine()
    rainy_engine.start_new_game()
    rainy_engine.world.update_world_state = lambda: None
    rainy_engine.world.characters["Timmy"]["energy"] = 10
    rainy_engine.world.rooms["Bridge"]["weather"] = "rain"
    rainy_engine.world.rooms["Bridge"]["rain_ticks"] = 3
    rainy_engine.world.state["bridge_flooding"] = True
    rainy_result = rainy_engine.run_tick("move:south")

    # The move still succeeds, but ends with strictly less energy than the dry run.
    self.assertEqual(rainy_engine.world.characters["Timmy"]["room"], "Bridge")
    self.assertLess(rainy_engine.world.characters["Timmy"]["energy"], dry_energy)
    # Some log or world-event line must mention the rainy/slick bridge.
    self.assertTrue(
        any("bridge" in line.lower() and ("rain" in line.lower() or "slick" in line.lower()) for line in rainy_result["log"] + rainy_result["world_events"]),
        rainy_result,
    )
|
||||
|
||||
def test_tower_power_low_blocks_study_and_write_rule(self):
    """With tower_power_low set, both 'study' and 'write_rule' are refused without side effects."""
    module = load_game_module()
    engine = module.GameEngine()
    engine.start_new_game()
    # Freeze world state so the low-power flag cannot be cleared mid-test.
    engine.world.update_world_state = lambda: None
    engine.world.characters["Timmy"]["room"] = "Tower"
    engine.world.characters["Timmy"]["energy"] = 10
    engine.world.state["tower_power_low"] = True

    rules_before = list(engine.world.rooms["Tower"]["messages"])
    study_result = engine.run_tick("study")
    # Blocked study must not spend energy.
    self.assertEqual(engine.world.characters["Timmy"]["energy"], 10)
    self.assertTrue(
        any("power" in line.lower() and ("study" in line.lower() or "servers" in line.lower()) for line in study_result["log"] + study_result["world_events"]),
        study_result,
    )

    write_result = engine.run_tick("write_rule")
    # Blocked write_rule must not append to the Tower whiteboard.
    self.assertEqual(engine.world.rooms["Tower"]["messages"], rules_before)
    self.assertTrue(
        any("power" in line.lower() and ("write" in line.lower() or "whiteboard" in line.lower()) for line in write_result["log"] + write_result["world_events"]),
        write_result,
    )
|
||||
|
||||
def test_cold_forge_blocks_forge_action_and_bezalel_reacts(self):
    """A cold forge refuses the 'forge' action, and Bezalel comments on the dead fire."""
    module = load_game_module()
    engine = module.GameEngine()
    engine.start_new_game()
    # Freeze world state and NPC movement so the setup below stays stable.
    engine.world.update_world_state = lambda: None
    engine.npc_ai.make_choice = lambda _name: None
    engine.world.characters["Timmy"]["room"] = "Forge"
    engine.world.characters["Timmy"]["energy"] = 10
    engine.world.characters["Bezalel"]["room"] = "Forge"
    engine.world.rooms["Forge"]["fire"] = "cold"
    engine.world.state["forge_fire_dying"] = True
    forged_before = list(engine.world.rooms["Forge"]["forged_items"])

    # Force NPC speech (random() -> 0.0 beats any speech chance) and make
    # random.choice deterministic by always taking the first option.
    with patch.object(module.random, "random", return_value=0.0), patch.object(module.random, "choice", side_effect=lambda seq: seq[0]):
        result = engine.run_tick("forge")

    # Nothing may be forged while the fire is cold.
    self.assertEqual(engine.world.rooms["Forge"]["forged_items"], forged_before)
    self.assertTrue(
        any("forge" in line.lower() and ("cold" in line.lower() or "fire" in line.lower()) for line in result["log"] + result["world_events"]),
        result,
    )
    # Bezalel's event dialogue about the fire must appear in the log.
    self.assertTrue(
        any(line.startswith("Bezalel says:") and ("fire" in line.lower() or "forge" in line.lower()) for line in result["log"]),
        result,
    )
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
||||
85
tests/test_timmy_gemma4_mac.py
Normal file
85
tests/test_timmy_gemma4_mac.py
Normal file
@@ -0,0 +1,85 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import importlib.util
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parent.parent
|
||||
SCRIPT = ROOT / "scripts" / "timmy_gemma4_mac.py"
|
||||
README = ROOT / "scripts" / "README_big_brain.md"
|
||||
|
||||
|
||||
def load_module():
    """Import scripts/timmy_gemma4_mac.py from its file path as a fresh module.

    Registers the module in sys.modules under "timmy_gemma4_mac" so tests
    can patch its attributes (e.g. request.urlopen) by dotted path.
    """
    spec = importlib.util.spec_from_file_location("timmy_gemma4_mac", str(SCRIPT))
    module = importlib.util.module_from_spec(spec)
    sys.modules["timmy_gemma4_mac"] = module
    spec.loader.exec_module(module)
    return module
||||
|
||||
|
||||
class _FakeResponse:
|
||||
def __init__(self, payload: dict):
|
||||
self._payload = json.dumps(payload).encode()
|
||||
|
||||
def read(self) -> bytes:
|
||||
return self._payload
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc, tb):
|
||||
return False
|
||||
|
||||
|
||||
def test_script_exists() -> None:
    """Guard test: the wiring helper script must be checked in at the expected path."""
    assert SCRIPT.exists(), "scripts/timmy_gemma4_mac.py must exist"
|
||||
|
||||
|
||||
def test_default_paths_target_timmy_mac_hermes() -> None:
    """Defaults must point at the Mac Hermes config and the RunPod access-key file."""
    mod = load_module()
    assert mod.DEFAULT_CONFIG_PATH == Path.home() / ".hermes" / "config.yaml"
    assert mod.DEFAULT_TOKEN_FILE == Path.home() / ".config" / "runpod" / "access_key"
|
||||
|
||||
|
||||
def test_choose_base_url_prefers_vertex_then_explicit_then_runpod() -> None:
    """choose_base_url resolves each source kind to a normalized /v1 endpoint."""
    mod = load_module()
    # Vertex bridge URL passes through normalization unchanged (already /v1).
    assert mod.choose_base_url(vertex_base_url="https://vertex-proxy.example/v1") == "https://vertex-proxy.example/v1"
    # Explicit base URL likewise.
    assert mod.choose_base_url(base_url="https://custom-endpoint/v1") == "https://custom-endpoint/v1"
    # A bare pod id is expanded into the RunPod proxy URL.
    assert mod.choose_base_url(pod_id="abc123") == "https://abc123-11434.proxy.runpod.net/v1"
|
||||
|
||||
|
||||
def test_build_summary_includes_prove_it_commands() -> None:
    """The summary must surface copy-pasteable verification commands."""
    mod = load_module()
    summary = mod.build_summary(base_url="https://vertex-proxy.example/v1", model="gemma4:latest")
    # The generic verifier script always comes first.
    assert summary["verification_commands"][0] == "python3 scripts/verify_big_brain.py"
    # The final prove-it chat command must be present somewhere in the list.
    assert any("hermes chat --model gemma4 --provider big_brain" in cmd for cmd in summary["verification_commands"])
|
||||
|
||||
|
||||
def test_verify_openai_chat_targets_chat_completions() -> None:
    """verify_openai_chat must POST to /chat/completions and return the first choice's content."""
    mod = load_module()
    # Minimal OpenAI-shaped response body.
    response_payload = {
        "choices": [{"message": {"content": "READY"}}]
    }

    # Patch urlopen inside the freshly loaded module so no network traffic occurs.
    with patch("timmy_gemma4_mac.request.urlopen", return_value=_FakeResponse(response_payload)) as mocked:
        result = mod.verify_openai_chat("https://vertex-proxy.example/v1", model="gemma4:latest", prompt="say READY")

    assert result == "READY"
    # Inspect the Request object that was handed to urlopen.
    req = mocked.call_args.args[0]
    assert req.full_url == "https://vertex-proxy.example/v1/chat/completions"
|
||||
|
||||
|
||||
def test_readme_mentions_timmy_mac_wiring_flow() -> None:
    """The Big Brain README must document the Timmy Mac wiring flow end to end."""
    text = README.read_text(encoding="utf-8")
    required = [
        "scripts/timmy_gemma4_mac.py",
        "--vertex-base-url",
        "--write-config",
        "python3 scripts/verify_big_brain.py",
        "hermes chat --model gemma4 --provider big_brain",
    ]
    # Collect every required snippet the README fails to mention.
    missing = []
    for snippet in required:
        if snippet not in text:
            missing.append(snippet)
    assert not missing, missing
|
||||
Reference in New Issue
Block a user