from __future__ import annotations

from pathlib import Path

import yaml
def test_config_defaults_to_local_llama_cpp_runtime() -> None:
    """Verify config.yaml wires the default model to the local llama.cpp server.

    Checks three things against the checked-in config file:
    the primary model entry, the matching "Local llama.cpp" custom-provider
    entry, and the fallback model. Raises StopIteration if no custom provider
    named "Local llama.cpp" exists.
    """
    config = yaml.safe_load(Path("config.yaml").read_text())

    # Primary model must route through the custom provider at the local server.
    model = config["model"]
    assert model["provider"] == "custom"
    assert model["default"] == "hermes4:14b"
    assert model["base_url"] == "http://localhost:8081/v1"

    # The custom-provider entry backing the default model must agree on the model id.
    local_provider = next(
        provider
        for provider in config["custom_providers"]
        if provider["name"] == "Local llama.cpp"
    )
    assert local_provider["model"] == "hermes4:14b"

    # Fallback stays on the custom provider but uses a different model.
    fallback = config["fallback_model"]
    assert fallback["provider"] == "custom"
    assert fallback["model"] == "gemini-2.5-pro"