from __future__ import annotations
from pathlib import Path
import yaml
def test_config_defaults_to_local_llama_cpp_runtime() -> None:
    """Verify config.yaml defaults to the local llama.cpp runtime.

    Asserts three properties of the checked-in config:
    - the primary model uses the ``custom`` provider, serving ``hermes4:14b``
      via the local llama.cpp OpenAI-compatible endpoint on port 8081,
    - a matching "Local llama.cpp" entry exists in ``custom_providers``,
    - the fallback model is a local Ollama instance running ``hermes3:latest``.

    Raises:
        FileNotFoundError: if ``config.yaml`` is absent from the working dir.
        StopIteration: if no "Local llama.cpp" custom provider is defined.
    """
    config = yaml.safe_load(Path("config.yaml").read_text())

    # Primary model: must route to the custom provider backed by llama.cpp.
    model = config["model"]
    assert model["provider"] == "custom"
    assert model["default"] == "hermes4:14b"
    assert model["base_url"] == "http://localhost:8081/v1"

    # The custom-provider list must contain the matching local entry;
    # next() raises StopIteration (failing the test) if it is missing.
    local_provider = next(
        entry for entry in config["custom_providers"] if entry["name"] == "Local llama.cpp"
    )
    assert local_provider["model"] == "hermes4:14b"

    # Fallback model: a local Ollama deployment of hermes3.
    fallback = config["fallback_model"]
    assert fallback["provider"] == "ollama"
    assert fallback["model"] == "hermes3:latest"
    # Only require that the fallback endpoint is local; port may vary.
    assert "localhost" in fallback["base_url"]