Compare commits
1 Commits
queue/322-
...
claude/iss
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
656d7e243e |
@@ -517,3 +517,71 @@ def resolve_provider_full(
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
|
||||
# -- Runtime classification ---------------------------------------------------

# Every provider name here is definitively cloud-hosted (a remote API, never a
# local inference server).  _classify_runtime() consults this set to decide
# between "cloud" and "unknown".  The set is the union of the overlay keys and
# the common alias spellings that normalize to those cloud providers.
_CLOUD_PREFIXES: frozenset[str] = frozenset({
    # Common aliases that normalize to cloud providers
    "openai", "gemini", "google", "google-gemini", "google-ai-studio",
    "claude", "claude-code", "copilot", "github", "github-copilot",
    "glm", "z-ai", "z.ai", "zhipu", "zai",
    "kimi", "kimi-coding", "moonshot",
    "minimax", "minimax-china", "minimax_cn",
    "deep-seek",
    "dashscope", "aliyun", "qwen", "alibaba-cloud", "alibaba",
    "hf", "hugging-face", "huggingface-hub", "huggingface",
    "ai-gateway", "aigateway", "vercel-ai-gateway",
    "opencode-zen", "zen",
    "opencode-go-sub",
    "kilocode", "kilo-code", "kilo-gateway", "kilo",
}) | frozenset(HERMES_OVERLAYS)  # iterating a dict yields its keys
|
||||
|
||||
# Every provider name here is definitively local: a self-hosted inference
# server with no external API behind it.
_LOCAL_PROVIDERS: frozenset[str] = frozenset((
    "ollama",
    "local",
    "vllm",
    "llamacpp",
    "llama.cpp",
    "llama-cpp",
    "lmstudio",
    "lm-studio",
))
|
||||
|
||||
|
||||
def _classify_runtime(provider: Optional[str], model: str) -> str:
    """Classify a provider/model pair into a runtime category.

    Returns one of:
        ``"cloud"``   — the request targets a known remote/hosted provider.
        ``"local"``   — the request targets a self-hosted inference server.
        ``"unknown"`` — not enough context to tell which one it is.

    Decision order:
        1. A recognised local provider name wins: ``"local"``.
        2. A recognised cloud provider name: ``"cloud"``.
        3. Any other non-empty provider is ``"unknown"`` — deliberately NOT
           ``"local"``; defaulting to local was the bug fixed here (#556).
        4. With no provider at all, a ``"<prefix>/<model>"`` string whose
           prefix is a known cloud provider counts as ``"cloud"``.
        5. Anything else: ``"unknown"``.
    """
    normalized = (provider or "").strip().lower()

    if normalized:
        # Rules 1-3: the provider name alone decides the outcome.
        if normalized in _LOCAL_PROVIDERS:
            return "local"
        return "cloud" if normalized in _CLOUD_PREFIXES else "unknown"

    # Rule 4: no provider given — look for a cloud prefix in the model string
    # (e.g. "openai/gpt-4o").  partition() leaves sep empty when no "/" exists.
    prefix, sep, _ = (model or "").strip().lower().partition("/")
    if sep and prefix in _CLOUD_PREFIXES:
        return "cloud"

    # Rule 5: insufficient context.
    return "unknown"
|
||||
|
||||
92
tests/test_classify_runtime.py
Normal file
92
tests/test_classify_runtime.py
Normal file
@@ -0,0 +1,92 @@
|
||||
"""Tests for _classify_runtime() edge cases.
|
||||
|
||||
Covers the bug reported in #556: unknown provider with a model string
|
||||
incorrectly returned "local" instead of "unknown".
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from hermes_cli.providers import _classify_runtime
|
||||
|
||||
|
||||
class TestClassifyRuntimeLocalProviders:
    """Known local providers classify as 'local' regardless of the model."""

    def test_ollama_no_model(self):
        result = _classify_runtime("ollama", "")
        assert result == "local"

    def test_ollama_with_model(self):
        result = _classify_runtime("ollama", "llama3:8b")
        assert result == "local"

    def test_local_provider_no_model(self):
        result = _classify_runtime("local", "")
        assert result == "local"

    def test_local_provider_with_model(self):
        result = _classify_runtime("local", "my-model")
        assert result == "local"

    def test_vllm_provider(self):
        # Model carries a slash prefix, but the explicit provider wins.
        result = _classify_runtime("vllm", "meta/llama-3")
        assert result == "local"

    def test_llamacpp_provider(self):
        result = _classify_runtime("llamacpp", "mistral")
        assert result == "local"
|
||||
|
||||
|
||||
class TestClassifyRuntimeCloudProviders:
    """Known cloud providers classify as 'cloud'."""

    def test_anthropic_provider(self):
        result = _classify_runtime("anthropic", "claude-opus-4-6")
        assert result == "cloud"

    def test_openrouter_provider(self):
        result = _classify_runtime("openrouter", "anthropic/claude-opus-4-6")
        assert result == "cloud"

    def test_nous_provider(self):
        result = _classify_runtime("nous", "hermes-3")
        assert result == "cloud"

    def test_gemini_provider(self):
        result = _classify_runtime("gemini", "gemini-pro")
        assert result == "cloud"

    def test_deepseek_provider(self):
        result = _classify_runtime("deepseek", "deepseek-chat")
        assert result == "cloud"
|
||||
|
||||
|
||||
class TestClassifyRuntimeUnknownProviders:
    """Regression tests for #556: unknown provider should return 'unknown', not 'local'."""

    def test_unknown_provider_with_model(self):
        """The core #556 case: a 'custom' provider plus a model is not 'local'."""
        result = _classify_runtime("custom", "my-model")
        assert result == "unknown"

    def test_unknown_provider_no_model(self):
        """An unknown provider without a model is likewise 'unknown'."""
        result = _classify_runtime("custom", "")
        assert result == "unknown"

    def test_arbitrary_provider_with_model(self):
        """Every unrecognised provider string yields 'unknown', model or not."""
        result = _classify_runtime("my-private-llm", "some-model")
        assert result == "unknown"

    def test_arbitrary_provider_no_model(self):
        result = _classify_runtime("my-private-llm", "")
        assert result == "unknown"

    def test_whitespace_only_provider_treated_as_empty(self):
        """A whitespace-only provider counts as absent; with no model → unknown."""
        result = _classify_runtime(" ", "")
        assert result == "unknown"
|
||||
|
||||
|
||||
class TestClassifyRuntimeEmptyProvider:
    """With no provider, only a recognised cloud model prefix implies 'cloud'."""

    def test_empty_provider_cloud_prefixed_model(self):
        """An empty provider plus a cloud-prefixed model is classified 'cloud'."""
        result = _classify_runtime("", "openrouter/gpt-4o")
        assert result == "cloud"

    def test_none_provider_cloud_prefixed_model(self):
        result = _classify_runtime(None, "anthropic/claude-opus-4-6")
        assert result == "cloud"

    def test_empty_provider_no_model(self):
        result = _classify_runtime("", "")
        assert result == "unknown"

    def test_none_provider_no_model(self):
        result = _classify_runtime(None, "")
        assert result == "unknown"

    def test_empty_provider_non_cloud_prefixed_model(self):
        """No provider and a bare model name (no prefix) gives 'unknown'."""
        result = _classify_runtime("", "my-model")
        assert result == "unknown"

    def test_empty_provider_model_with_unknown_prefix(self):
        """A model prefix outside the known cloud set also gives 'unknown'."""
        result = _classify_runtime("", "myprivate/llm-7b")
        assert result == "unknown"
|
||||
Reference in New Issue
Block a user