feat: validate /model against live API instead of hardcoded lists

Replace the static catalog-based model validation with a live API probe.
The /model command now hits the provider's /models endpoint to check if
the requested model actually exists:

- Model found in API → accepted + saved to config
- Model NOT found in API → rejected with 'Error: not a valid model'
  and fuzzy-match suggestions from the live model list
- API unreachable → graceful fallback to hardcoded catalog (session-only
  for unrecognized models)
- Format errors (empty, spaces, missing '/') still caught instantly
  without a network call

The API probe takes ~0.2s for OpenRouter (346 models) and works with any
OpenAI-compatible endpoint (Ollama, vLLM, custom, etc.).

32 tests covering all paths: format checks, API found, API not found,
API unreachable fallback, CLI integration.
This commit is contained in:
teknium1
2026-03-08 05:22:15 -07:00
parent 77f47768dd
commit 245d174359
4 changed files with 226 additions and 168 deletions

1
cli.py
View File

@@ -2079,6 +2079,7 @@ class HermesCLI:
validation = validate_requested_model(
new_model,
provider_for_validation,
api_key=self.api_key,
base_url=self.base_url,
)
except Exception:

View File

@@ -7,6 +7,9 @@ Add, remove, or reorder entries here — both `hermes setup` and
from __future__ import annotations
import json
import urllib.request
import urllib.error
from difflib import get_close_matches
from typing import Any, Optional
@@ -106,14 +109,46 @@ def provider_model_ids(provider: Optional[str]) -> list[str]:
return list(_PROVIDER_MODELS.get(normalized, []))
def fetch_api_models(
    api_key: Optional[str],
    base_url: Optional[str],
    timeout: float = 5.0,
) -> Optional[list[str]]:
    """Fetch the list of available model IDs from the provider's ``/models`` endpoint.

    Args:
        api_key: Bearer token sent via the ``Authorization`` header when set.
        base_url: Provider base URL (e.g. ``https://openrouter.ai/api/v1``);
            ``/models`` is appended. Empty/``None`` short-circuits to ``None``
            without any network call.
        timeout: Socket timeout in seconds for the HTTP request.

    Returns:
        A list of model ID strings, or ``None`` if the endpoint could not
        be reached or returned an unusable payload (network error, timeout,
        auth failure, malformed JSON, etc.). ``None`` means "couldn't
        validate" — distinct from ``[]`` ("reached, zero models").
    """
    if not base_url:
        return None
    url = base_url.rstrip("/") + "/models"
    headers: dict[str, str] = {}
    if api_key:
        headers["Authorization"] = f"Bearer {api_key}"
    req = urllib.request.Request(url, headers=headers)
    try:
        with urllib.request.urlopen(req, timeout=timeout) as resp:
            data = json.loads(resp.read().decode())
        # Standard OpenAI format: {"data": [{"id": "model-name", ...}, ...]}
        items = data.get("data", []) if isinstance(data, dict) else []
        # Drop malformed entries and blank IDs so callers only ever see
        # non-empty model-ID strings.
        return [m["id"] for m in items if isinstance(m, dict) and m.get("id")]
    except Exception:
        # Deliberately broad: any failure just means the live probe is
        # unavailable, and the caller falls back to the hardcoded catalog.
        return None
def validate_requested_model(
model_name: str,
provider: Optional[str],
*,
api_key: Optional[str] = None,
base_url: Optional[str] = None,
) -> dict[str, Any]:
"""
Validate a `/model` value for the active provider.
Validate a ``/model`` value for the active provider.
Performs format checks first, then probes the live API to confirm
the model actually exists.
Returns a dict with:
- accepted: whether the CLI should switch to the requested model now
@@ -142,29 +177,12 @@ def validate_requested_model(
"message": "Model names cannot contain spaces.",
}
known_models = provider_model_ids(normalized)
if requested in known_models:
return {
"accepted": True,
"persist": True,
"recognized": True,
"message": None,
}
suggestion = get_close_matches(requested, known_models, n=1, cutoff=0.6)
suggestion_text = f" Did you mean `{suggestion[0]}`?" if suggestion else ""
provider_label = _PROVIDER_LABELS.get(normalized, normalized)
if normalized == "custom":
return {
"accepted": True,
"persist": True,
"recognized": False,
"message": None,
}
# OpenRouter requires provider/model format
if normalized == "openrouter":
if "/" not in requested or requested.startswith("/") or requested.endswith("/"):
known_models = provider_model_ids(normalized)
suggestion = get_close_matches(requested, known_models, n=1, cutoff=0.6)
suggestion_text = f" Did you mean `{suggestion[0]}`?" if suggestion else ""
return {
"accepted": False,
"persist": False,
@@ -175,47 +193,57 @@ def validate_requested_model(
f"{suggestion_text}"
),
}
# Probe the live API to check if the model actually exists
api_models = fetch_api_models(api_key, base_url)
if api_models is not None:
if requested in set(api_models):
# API confirmed the model exists
return {
"accepted": True,
"persist": True,
"recognized": True,
"message": None,
}
else:
# API responded but model is not listed
suggestions = get_close_matches(requested, api_models, n=3, cutoff=0.5)
suggestion_text = ""
if suggestions:
suggestion_text = "\n Did you mean: " + ", ".join(f"`{s}`" for s in suggestions)
return {
"accepted": False,
"persist": False,
"recognized": False,
"message": (
f"Error: `{requested}` is not a valid model for this provider."
f"{suggestion_text}"
),
}
# api_models is None — couldn't reach API, fall back to catalog check
provider_label = _PROVIDER_LABELS.get(normalized, normalized)
known_models = provider_model_ids(normalized)
if requested in known_models:
return {
"accepted": True,
"persist": False,
"recognized": False,
"message": (
f"`{requested}` is not in Hermes' curated {provider_label} model list. "
"Using it for this session only; config unchanged."
f"{suggestion_text}"
),
}
if normalized == "nous":
return {
"accepted": True,
"persist": False,
"recognized": False,
"message": (
f"Could not validate `{requested}` against the live {provider_label} catalog here. "
"Using it for this session only; config unchanged."
f"{suggestion_text}"
),
}
if known_models:
return {
"accepted": True,
"persist": False,
"recognized": False,
"message": (
f"`{requested}` is not in the known {provider_label} model list. "
"Using it for this session only; config unchanged."
f"{suggestion_text}"
),
"persist": True,
"recognized": True,
"message": None,
}
# Can't validate — accept for session only
suggestion = get_close_matches(requested, known_models, n=1, cutoff=0.6)
suggestion_text = f" Did you mean `{suggestion[0]}`?" if suggestion else ""
return {
"accepted": True,
"persist": False,
"recognized": False,
"message": (
f"Could not validate `{requested}` for provider {provider_label}. "
f"Could not validate `{requested}` against the live {provider_label} API. "
"Using it for this session only; config unchanged."
f"{suggestion_text}"
),

View File

@@ -1,12 +1,35 @@
"""Tests for provider-aware `/model` validation in hermes_cli.models."""
from unittest.mock import patch
from hermes_cli.models import (
fetch_api_models,
normalize_provider,
provider_model_ids,
validate_requested_model,
)
# -- helpers -----------------------------------------------------------------
# Canned model list returned by the mocked fetch_api_models probe.
FAKE_API_MODELS = [
    "anthropic/claude-opus-4.6",
    "anthropic/claude-sonnet-4.5",
    "openai/gpt-5.4-pro",
    "openai/gpt-5.4",
    "google/gemini-3-pro-preview",
]


def _validate(model, provider="openrouter", api_models=FAKE_API_MODELS, **kw):
    """Run validate_requested_model with the live-API probe stubbed out.

    ``api_models`` is what the mocked fetch_api_models returns; pass
    ``None`` to simulate an unreachable API.
    """
    probe = patch("hermes_cli.models.fetch_api_models", return_value=api_models)
    with probe:
        result = validate_requested_model(model, provider, **kw)
    return result
# -- normalize_provider ------------------------------------------------------
class TestNormalizeProvider:
def test_defaults_to_openrouter(self):
assert normalize_provider(None) == "openrouter"
@@ -31,6 +54,8 @@ class TestNormalizeProvider:
assert normalize_provider("GLM") == "zai"
# -- provider_model_ids ------------------------------------------------------
class TestProviderModelIds:
def test_openrouter_returns_curated_list(self):
ids = provider_model_ids("openrouter")
@@ -48,120 +73,121 @@ class TestProviderModelIds:
assert provider_model_ids("glm") == provider_model_ids("zai")
class TestValidateRequestedModel:
# -- known models (happy path) ---------------------------------------
# -- fetch_api_models --------------------------------------------------------
def test_known_openrouter_model_accepted_and_persisted(self):
result = validate_requested_model("anthropic/claude-opus-4.6", "openrouter")
class TestFetchApiModels:
def test_returns_none_when_no_base_url(self):
assert fetch_api_models("key", None) is None
assert fetch_api_models("key", "") is None
assert result["accepted"] is True
assert result["persist"] is True
assert result["recognized"] is True
assert result["message"] is None
def test_returns_none_on_network_error(self):
with patch("hermes_cli.models.urllib.request.urlopen", side_effect=Exception("timeout")):
assert fetch_api_models("key", "https://example.com/v1") is None
# -- empty / whitespace ----------------------------------------------
# -- validate_requested_model — format checks (no API needed) ----------------
class TestValidateFormatChecks:
def test_empty_model_rejected(self):
result = validate_requested_model("", "openrouter")
result = _validate("")
assert result["accepted"] is False
assert "empty" in result["message"]
def test_whitespace_only_rejected(self):
result = validate_requested_model(" ", "openrouter")
result = _validate(" ")
assert result["accepted"] is False
assert "empty" in result["message"]
def test_model_with_spaces_rejected(self):
result = validate_requested_model("anthropic/ claude-opus", "openrouter")
result = _validate("anthropic/ claude-opus")
assert result["accepted"] is False
assert "spaces" in result["message"].lower()
# -- OpenRouter format validation ------------------------------------
def test_openrouter_requires_slash(self):
result = validate_requested_model("claude-opus-4.6", "openrouter")
result = _validate("claude-opus-4.6")
assert result["accepted"] is False
assert result["persist"] is False
assert "provider/model" in result["message"]
def test_openrouter_rejects_leading_slash(self):
result = validate_requested_model("/claude-opus-4.6", "openrouter")
result = _validate("/claude-opus-4.6")
assert result["accepted"] is False
def test_openrouter_rejects_trailing_slash(self):
result = validate_requested_model("anthropic/", "openrouter")
result = _validate("anthropic/")
assert result["accepted"] is False
def test_openrouter_unknown_but_plausible_is_session_only(self):
result = validate_requested_model("anthropic/claude-next-gen", "openrouter")
assert result["accepted"] is True
assert result["persist"] is False
assert result["recognized"] is False
assert "session only" in result["message"].lower()
# -- custom endpoint -------------------------------------------------
def test_custom_base_url_accepts_anything(self):
result = validate_requested_model(
"my-local-model",
"openrouter",
base_url="http://localhost:11434/v1",
)
# -- validate_requested_model — API probe found model ------------------------
class TestValidateApiFound:
def test_model_found_in_api_is_accepted_and_persisted(self):
result = _validate("anthropic/claude-opus-4.6")
assert result["accepted"] is True
assert result["persist"] is True
assert result["recognized"] is True
assert result["message"] is None
# -- nous provider ---------------------------------------------------
def test_nous_provider_is_session_only(self):
result = validate_requested_model("hermes-3", "nous")
def test_model_found_in_api_for_custom_endpoint(self):
result = _validate(
"my-model",
provider="openrouter",
api_models=["my-model", "other-model"],
base_url="http://localhost:11434/v1",
)
assert result["accepted"] is True
assert result["persist"] is True
# -- validate_requested_model — API probe model not found --------------------
class TestValidateApiNotFound:
def test_model_not_in_api_is_rejected(self):
result = _validate("anthropic/claude-nonexistent")
assert result["accepted"] is False
assert result["persist"] is False
assert "Nous Portal" in result["message"]
assert "not a valid model" in result["message"]
# -- other providers with catalogs -----------------------------------
def test_rejection_includes_suggestions(self):
result = _validate("anthropic/claude-opus-4.5") # close to claude-opus-4.6
assert result["accepted"] is False
assert "Did you mean" in result["message"]
def test_known_zai_model_accepted_and_persisted(self):
result = validate_requested_model("glm-5", "zai")
def test_completely_wrong_model_rejected(self):
result = _validate("totally/fake-model-xyz")
assert result["accepted"] is False
assert "not a valid model" in result["message"]
# -- validate_requested_model — API unreachable (fallback) -------------------
class TestValidateApiFallback:
def test_known_catalog_model_accepted_when_api_down(self):
"""If API is unreachable, fall back to hardcoded catalog."""
result = _validate("anthropic/claude-opus-4.6", api_models=None)
assert result["accepted"] is True
assert result["persist"] is True
assert result["recognized"] is True
def test_unknown_zai_model_is_session_only(self):
result = validate_requested_model("glm-99", "zai")
def test_unknown_model_is_session_only_when_api_down(self):
result = _validate("anthropic/claude-next-gen", api_models=None)
assert result["accepted"] is True
assert result["persist"] is False
assert "Z.AI" in result["message"]
assert "Could not validate" in result["message"]
assert "session only" in result["message"].lower()
# -- provider with no catalog ----------------------------------------
def test_zai_known_model_accepted_when_api_down(self):
result = _validate("glm-5", provider="zai", api_models=None)
assert result["accepted"] is True
assert result["persist"] is True
assert result["recognized"] is True
def test_unknown_provider_is_session_only(self):
result = validate_requested_model("some-model", "totally-unknown")
def test_zai_unknown_model_session_only_when_api_down(self):
result = _validate("glm-99", provider="zai", api_models=None)
assert result["accepted"] is True
assert result["persist"] is False
def test_unknown_provider_session_only_when_api_down(self):
result = _validate("some-model", provider="totally-unknown", api_models=None)
assert result["accepted"] is True
assert result["persist"] is False
assert result["message"] is not None
# -- codex provider --------------------------------------------------
def test_unknown_codex_model_is_session_only(self):
result = validate_requested_model("totally-made-up", "openai-codex")
assert result["accepted"] is True
assert result["persist"] is False
assert "OpenAI Codex" in result["message"]
# -- fuzzy suggestions -----------------------------------------------
def test_close_match_gets_suggestion(self):
# Typo of a known model — should get a suggestion in the message
result = validate_requested_model("anthropic/claude-opus-4.5", "openrouter")
# May or may not match depending on cutoff, but should be session-only
assert result["accepted"] is True
assert result["persist"] is False

View File

@@ -13,58 +13,17 @@ class TestModelCommand:
cli_obj.provider = "openrouter"
cli_obj.requested_provider = "openrouter"
cli_obj.base_url = "https://openrouter.ai/api/v1"
cli_obj.api_key = "test-key"
cli_obj._explicit_api_key = None
cli_obj._explicit_base_url = None
return cli_obj
def test_invalid_model_does_not_change_current_model(self, capsys):
def test_valid_model_from_api_saved_to_config(self, capsys):
cli_obj = self._make_cli()
with patch("hermes_cli.auth.resolve_provider", return_value="openrouter"), \
patch("hermes_cli.models.validate_requested_model", return_value={
"accepted": False,
"persist": False,
"recognized": False,
"message": "OpenRouter model IDs should use the `provider/model` format.",
}), \
patch("cli.save_config_value") as save_mock:
cli_obj.process_command("/model invalid-model")
output = capsys.readouterr().out
assert "Current model unchanged" in output
assert cli_obj.model == "anthropic/claude-opus-4.6"
assert cli_obj.agent is not None
save_mock.assert_not_called()
def test_unknown_model_stays_session_only(self, capsys):
cli_obj = self._make_cli()
with patch("hermes_cli.auth.resolve_provider", return_value="openrouter"), \
patch("hermes_cli.models.validate_requested_model", return_value={
"accepted": True,
"persist": False,
"recognized": False,
"message": "Using it for this session only; config unchanged.",
}), \
patch("cli.save_config_value") as save_mock:
cli_obj.process_command("/model anthropic/claude-sonnet-next")
output = capsys.readouterr().out
assert "session only" in output
assert cli_obj.model == "anthropic/claude-sonnet-next"
assert cli_obj.agent is None
save_mock.assert_not_called()
def test_known_model_is_saved_to_config(self, capsys):
cli_obj = self._make_cli()
with patch("hermes_cli.auth.resolve_provider", return_value="openrouter"), \
patch("hermes_cli.models.validate_requested_model", return_value={
"accepted": True,
"persist": True,
"recognized": True,
"message": None,
}), \
patch("hermes_cli.models.fetch_api_models",
return_value=["anthropic/claude-sonnet-4.5", "openai/gpt-5.4"]), \
patch("cli.save_config_value", return_value=True) as save_mock:
cli_obj.process_command("/model anthropic/claude-sonnet-4.5")
@@ -74,12 +33,56 @@ class TestModelCommand:
assert cli_obj.agent is None
save_mock.assert_called_once_with("model.default", "anthropic/claude-sonnet-4.5")
def test_invalid_model_from_api_is_rejected(self, capsys):
cli_obj = self._make_cli()
with patch("hermes_cli.auth.resolve_provider", return_value="openrouter"), \
patch("hermes_cli.models.fetch_api_models",
return_value=["anthropic/claude-opus-4.6"]), \
patch("cli.save_config_value") as save_mock:
cli_obj.process_command("/model anthropic/fake-model")
output = capsys.readouterr().out
assert "not a valid model" in output
assert cli_obj.model == "anthropic/claude-opus-4.6" # unchanged
assert cli_obj.agent is not None # not reset
save_mock.assert_not_called()
def test_model_when_api_unreachable_falls_back_session_only(self, capsys):
cli_obj = self._make_cli()
with patch("hermes_cli.auth.resolve_provider", return_value="openrouter"), \
patch("hermes_cli.models.fetch_api_models", return_value=None), \
patch("cli.save_config_value") as save_mock:
cli_obj.process_command("/model anthropic/claude-sonnet-next")
output = capsys.readouterr().out
assert "session only" in output
assert cli_obj.model == "anthropic/claude-sonnet-next"
assert cli_obj.agent is None
save_mock.assert_not_called()
def test_bad_format_rejected_without_api_call(self, capsys):
cli_obj = self._make_cli()
with patch("hermes_cli.auth.resolve_provider", return_value="openrouter"), \
patch("hermes_cli.models.fetch_api_models") as fetch_mock, \
patch("cli.save_config_value") as save_mock:
cli_obj.process_command("/model invalid-no-slash")
output = capsys.readouterr().out
assert "provider/model" in output
assert cli_obj.model == "anthropic/claude-opus-4.6" # unchanged
fetch_mock.assert_not_called() # no API call for format errors
save_mock.assert_not_called()
def test_validation_crash_falls_back_to_save(self, capsys):
"""If validate_requested_model throws, /model should still work (old behavior)."""
cli_obj = self._make_cli()
with patch("hermes_cli.auth.resolve_provider", return_value="openrouter"), \
patch("hermes_cli.models.validate_requested_model", side_effect=RuntimeError("boom")), \
patch("hermes_cli.models.validate_requested_model",
side_effect=RuntimeError("boom")), \
patch("cli.save_config_value", return_value=True) as save_mock:
cli_obj.process_command("/model anthropic/claude-sonnet-4.5")