1
0
* polish: streamline nav, extract inline styles, improve tablet UX

- Restructure desktop nav from 8+ flat links + overflow dropdown into
  5 grouped dropdowns (Core, Agents, Intel, System, More) matching
  the mobile menu structure to reduce decision fatigue
- Extract all inline styles from mission_control.html and base.html
  notification elements into mission-control.css with semantic classes
- Replace JS-built innerHTML with secure DOM construction in
  notification loader and chat history
- Add CONNECTING state to connection indicator (amber) instead of
  showing OFFLINE before WebSocket connects
- Add tablet breakpoint (1024px) with larger touch targets for
  Apple Pencil / stylus use and safe-area padding for iPad toolbar
- Add active-link highlighting in desktop dropdown menus
- Rename "Mission Control" page title to "System Overview" to
  disambiguate from the chat home page
- Add "Home — Timmy Time" page title to index.html

https://claude.ai/code/session_015uPUoKyYa8M2UAcyk5Gt6h

* fix(security): move auth-gate credentials to environment variables

Replace the hardcoded username, password, and HMAC secret in
auth-gate.py with os.environ lookups. Startup now refuses to run if any
variable is unset. Add AUTH_GATE_SECRET/USER/PASS to .env.example.

https://claude.ai/code/session_015uPUoKyYa8M2UAcyk5Gt6h

* refactor(tooling): migrate from black+isort+bandit to ruff

Replace three separate linting/formatting tools with a single ruff
invocation. Update tox.ini (lint, format, pre-push, pre-commit envs),
.pre-commit-config.yaml, and the CI workflow. Fix all ruff errors,
including unused imports, missing raise-from, and undefined names.
The ruff config maps the existing bandit skips to their equivalent S-rules.

https://claude.ai/code/session_015uPUoKyYa8M2UAcyk5Gt6h

---------

Co-authored-by: Claude <noreply@anthropic.com>
This commit is contained in:
Alexander Whitestone
2026-03-11 12:23:35 -04:00
committed by GitHub
parent 708c8a2477
commit 9d78eb31d1
149 changed files with 884 additions and 962 deletions

View File

@@ -13,10 +13,10 @@ import base64
import logging
import time
from dataclasses import dataclass, field
from datetime import datetime, timedelta, timezone
from datetime import UTC, datetime
from enum import Enum
from pathlib import Path
from typing import Any, Optional
from typing import Any
try:
import yaml
@@ -65,8 +65,8 @@ class ProviderMetrics:
successful_requests: int = 0
failed_requests: int = 0
total_latency_ms: float = 0.0
last_request_time: Optional[str] = None
last_error_time: Optional[str] = None
last_request_time: str | None = None
last_error_time: str | None = None
consecutive_failures: int = 0
@property
@@ -103,19 +103,19 @@ class Provider:
type: str # ollama, openai, anthropic, airllm
enabled: bool
priority: int
url: Optional[str] = None
api_key: Optional[str] = None
base_url: Optional[str] = None
url: str | None = None
api_key: str | None = None
base_url: str | None = None
models: list[dict] = field(default_factory=list)
# Runtime state
status: ProviderStatus = ProviderStatus.HEALTHY
metrics: ProviderMetrics = field(default_factory=ProviderMetrics)
circuit_state: CircuitState = CircuitState.CLOSED
circuit_opened_at: Optional[float] = None
circuit_opened_at: float | None = None
half_open_calls: int = 0
def get_default_model(self) -> Optional[str]:
def get_default_model(self) -> str | None:
"""Get the default model for this provider."""
for model in self.models:
if model.get("default"):
@@ -124,7 +124,7 @@ class Provider:
return self.models[0]["name"]
return None
def get_model_with_capability(self, capability: str) -> Optional[str]:
def get_model_with_capability(self, capability: str) -> str | None:
"""Get a model that supports the given capability."""
for model in self.models:
capabilities = model.get("capabilities", [])
@@ -191,14 +191,14 @@ class CascadeRouter:
metrics = router.get_metrics()
"""
def __init__(self, config_path: Optional[Path] = None) -> None:
def __init__(self, config_path: Path | None = None) -> None:
self.config_path = config_path or Path("config/providers.yaml")
self.providers: list[Provider] = []
self.config: RouterConfig = RouterConfig()
self._load_config()
# Initialize multi-modal manager if available
self._mm_manager: Optional[Any] = None
self._mm_manager: Any | None = None
try:
from infrastructure.models.multimodal import get_multimodal_manager
@@ -310,10 +310,10 @@ class CascadeRouter:
elif provider.type == "airllm":
# Check if airllm is installed
try:
import airllm
import importlib.util
return True
except ImportError:
return importlib.util.find_spec("airllm") is not None
except (ImportError, ModuleNotFoundError):
return False
elif provider.type in ("openai", "anthropic", "grok"):
@@ -368,7 +368,7 @@ class CascadeRouter:
def _get_fallback_model(
self, provider: Provider, original_model: str, content_type: ContentType
) -> Optional[str]:
) -> str | None:
"""Get a fallback model for the given content type."""
# Map content type to capability
capability_map = {
@@ -397,9 +397,9 @@ class CascadeRouter:
async def complete(
self,
messages: list[dict],
model: Optional[str] = None,
model: str | None = None,
temperature: float = 0.7,
max_tokens: Optional[int] = None,
max_tokens: int | None = None,
) -> dict:
"""Complete a chat conversation with automatic failover.
@@ -523,7 +523,7 @@ class CascadeRouter:
messages: list[dict],
model: str,
temperature: float,
max_tokens: Optional[int],
max_tokens: int | None,
content_type: ContentType = ContentType.TEXT,
) -> dict:
"""Try a single provider request."""
@@ -649,7 +649,7 @@ class CascadeRouter:
messages: list[dict],
model: str,
temperature: float,
max_tokens: Optional[int],
max_tokens: int | None,
) -> dict:
"""Call OpenAI API."""
import openai
@@ -681,7 +681,7 @@ class CascadeRouter:
messages: list[dict],
model: str,
temperature: float,
max_tokens: Optional[int],
max_tokens: int | None,
) -> dict:
"""Call Anthropic API."""
import anthropic
@@ -727,7 +727,7 @@ class CascadeRouter:
messages: list[dict],
model: str,
temperature: float,
max_tokens: Optional[int],
max_tokens: int | None,
) -> dict:
"""Call xAI Grok API via OpenAI-compatible SDK."""
import httpx
@@ -759,7 +759,7 @@ class CascadeRouter:
provider.metrics.total_requests += 1
provider.metrics.successful_requests += 1
provider.metrics.total_latency_ms += latency_ms
provider.metrics.last_request_time = datetime.now(timezone.utc).isoformat()
provider.metrics.last_request_time = datetime.now(UTC).isoformat()
provider.metrics.consecutive_failures = 0
# Close circuit breaker if half-open
@@ -778,7 +778,7 @@ class CascadeRouter:
"""Record a failed request."""
provider.metrics.total_requests += 1
provider.metrics.failed_requests += 1
provider.metrics.last_error_time = datetime.now(timezone.utc).isoformat()
provider.metrics.last_error_time = datetime.now(UTC).isoformat()
provider.metrics.consecutive_failures += 1
# Check if we should open circuit breaker
@@ -864,7 +864,7 @@ class CascadeRouter:
self,
prompt: str,
image_path: str,
model: Optional[str] = None,
model: str | None = None,
temperature: float = 0.7,
) -> dict:
"""Convenience method for vision requests.
@@ -893,7 +893,7 @@ class CascadeRouter:
# Module-level singleton
cascade_router: Optional[CascadeRouter] = None
cascade_router: CascadeRouter | None = None
def get_router() -> CascadeRouter: