Compare commits

1 commit: review-fix...kimi/issue

| Author | SHA1 | Date |
|---|---|---|
|  | f3fc86de07 |  |
@@ -54,25 +54,11 @@ providers:
         context_window: 2048
         capabilities: [text, vision, streaming]
 
-  # Secondary: Local AirLLM (if installed)
-  - name: airllm-local
-    type: airllm
-    enabled: false  # Enable if pip install airllm
-    priority: 2
-    models:
-      - name: 70b
-        default: true
-        capabilities: [text, tools, json, streaming]
-      - name: 8b
-        capabilities: [text, tools, json, streaming]
-      - name: 405b
-        capabilities: [text, tools, json, streaming]
-
-  # Tertiary: OpenAI (if API key available)
+  # Secondary: OpenAI (if API key available)
   - name: openai-backup
     type: openai
     enabled: false  # Enable by setting OPENAI_API_KEY
-    priority: 3
+    priority: 2
     api_key: "${OPENAI_API_KEY}"  # Loaded from environment
     base_url: null  # Use default OpenAI endpoint
     models:
@@ -84,11 +70,11 @@ providers:
         context_window: 128000
         capabilities: [text, vision, tools, json, streaming]
 
-  # Quaternary: Anthropic (if API key available)
+  # Tertiary: Anthropic (if API key available)
   - name: anthropic-backup
     type: anthropic
     enabled: false  # Enable by setting ANTHROPIC_API_KEY
-    priority: 4
+    priority: 3
     api_key: "${ANTHROPIC_API_KEY}"
     models:
       - name: claude-3-haiku-20240307
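After these two hunks, the configured cascade is Ollama first (priority 1), OpenAI second (priority 2), and Anthropic third (priority 3), with the AirLLM tier removed entirely. As a rough sketch of how a priority cascade like this is typically consumed — the function and the primary provider's name are illustrative, not taken from this repository — a router can sort the enabled providers by priority and try them in order:

```python
from dataclasses import dataclass


@dataclass
class ConfiguredProvider:
    """Illustrative stand-in for one providers.yaml entry."""
    name: str
    priority: int
    enabled: bool = True


def cascade_order(providers: list[ConfiguredProvider]) -> list[str]:
    """Names of enabled providers in the order a cascade would try them."""
    return [p.name for p in sorted(providers, key=lambda p: p.priority) if p.enabled]


# Priorities mirror the post-change providers.yaml; "ollama-local" is an assumed name.
entries = [
    ConfiguredProvider("anthropic-backup", priority=3),
    ConfiguredProvider("openai-backup", priority=2),
    ConfiguredProvider("ollama-local", priority=1),
]
print(cascade_order(entries))  # ['ollama-local', 'openai-backup', 'anthropic-backup']
```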
@@ -100,7 +100,7 @@ class Provider:
     """LLM provider configuration and state."""
 
     name: str
-    type: str  # ollama, openai, anthropic, airllm
+    type: str  # ollama, openai, anthropic
    enabled: bool
     priority: int
     url: str | None = None
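Together with the api_key field read by the availability check below and the models list in the YAML, the fields visible in this hunk suggest a dataclass roughly like the following; the defaults and the exact type of the models field are assumptions:

```python
from dataclasses import dataclass, field


@dataclass
class Provider:
    """LLM provider configuration and state (sketch based on the fields shown above)."""
    name: str
    type: str  # ollama, openai, anthropic
    enabled: bool
    priority: int
    url: str | None = None
    api_key: str | None = None  # checked for the hosted providers
    models: list[dict] = field(default_factory=list)  # shape assumed from providers.yaml
```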
@@ -308,15 +308,6 @@ class CascadeRouter:
                 logger.debug("Ollama provider check error: %s", exc)
                 return False
 
-        elif provider.type == "airllm":
-            # Check if airllm is installed
-            try:
-                import importlib.util
-
-                return importlib.util.find_spec("airllm") is not None
-            except (ImportError, ModuleNotFoundError):
-                return False
-
         elif provider.type in ("openai", "anthropic", "grok"):
             # Check if API key is set
             return provider.api_key is not None and provider.api_key != ""
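With the airllm branch deleted, the availability check collapses to two cases: an Ollama reachability probe and an API-key presence test for the hosted providers. The sketch below shows that reduced shape; the Ollama probe itself (client, endpoint, timeout) is assumed, since the diff only shows its error logging and fallback:

```python
import logging

import httpx  # assumed HTTP client for the Ollama probe

logger = logging.getLogger(__name__)


def check_provider_available(provider) -> bool:
    """Reduced availability check implied by this hunk (sketch, not the repo's code)."""
    if provider.type == "ollama":
        try:
            # Assumed probe: any cheap request against the local Ollama server.
            httpx.get(f"{provider.url}/api/tags", timeout=2.0).raise_for_status()
            return True
        except Exception as exc:
            logger.debug("Ollama provider check error: %s", exc)
            return False

    elif provider.type in ("openai", "anthropic", "grok"):
        # Check if API key is set
        return provider.api_key is not None and provider.api_key != ""

    return False
```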
@@ -2,7 +2,7 @@
 
 import time
 from pathlib import Path
-from unittest.mock import AsyncMock, MagicMock, patch
+from unittest.mock import AsyncMock, patch
 
 import pytest
 import yaml
@@ -489,34 +489,6 @@ class TestProviderAvailabilityCheck:
 
         assert router._check_provider_available(provider) is False
 
-    def test_check_airllm_installed(self):
-        """Test AirLLM when installed."""
-        router = CascadeRouter(config_path=Path("/nonexistent"))
-
-        provider = Provider(
-            name="airllm",
-            type="airllm",
-            enabled=True,
-            priority=1,
-        )
-
-        with patch("importlib.util.find_spec", return_value=MagicMock()):
-            assert router._check_provider_available(provider) is True
-
-    def test_check_airllm_not_installed(self):
-        """Test AirLLM when not installed."""
-        router = CascadeRouter(config_path=Path("/nonexistent"))
-
-        provider = Provider(
-            name="airllm",
-            type="airllm",
-            enabled=True,
-            priority=1,
-        )
-
-        with patch("importlib.util.find_spec", return_value=None):
-            assert router._check_provider_available(provider) is False
-
 
 class TestCascadeRouterReload:
     """Test hot-reload of providers.yaml."""
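The two deleted tests exercised the airllm branch by patching importlib.util.find_spec; the surviving API-key branch can be covered in the same style. A hypothetical example — the test name and the exact keyword arguments are illustrative, not from the repository:

```python
def test_check_openai_requires_api_key():
    """API-key providers should only be available when a key is set (illustrative)."""
    router = CascadeRouter(config_path=Path("/nonexistent"))

    without_key = Provider(name="openai-backup", type="openai", enabled=True, priority=2)
    with_key = Provider(
        name="openai-backup", type="openai", enabled=True, priority=2, api_key="sk-test"
    )

    assert router._check_provider_available(without_key) is False
    assert router._check_provider_available(with_key) is True
```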