Compare commits
1 Commits
fix/891-pr
...
fix/892
| Author | SHA1 | Date | |
|---|---|---|---|
| 517e2c571e |
@@ -1,262 +0,0 @@
|
||||
"""
|
||||
Profile Session Isolation — #891
|
||||
|
||||
Tags sessions with their originating profile and provides
|
||||
filtered access so profiles cannot see each other's data.
|
||||
|
||||
Current state: All sessions share one state.db with no profile tag.
|
||||
This module adds profile tagging and filtered queries.
|
||||
|
||||
Usage:
|
||||
from agent.profile_isolation import tag_session, get_profile_sessions, get_active_profile
|
||||
|
||||
# Tag a new session with the current profile
|
||||
tag_session(session_id, profile_name)
|
||||
|
||||
# Get sessions for a specific profile
|
||||
sessions = get_profile_sessions("sprint")
|
||||
|
||||
# Get current active profile
|
||||
profile = get_active_profile()
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
from datetime import datetime, timezone
|
||||
|
||||
# Root directory for all Hermes state; overridable via the HERMES_HOME env var.
HERMES_HOME = Path(os.getenv("HERMES_HOME", str(Path.home() / ".hermes")))
# Shared SQLite session store (may be absent on fresh installs).
SESSIONS_DB = HERMES_HOME / "sessions" / "state.db"
# JSON sidecar mapping session_id -> profile name; authoritative fallback
# when the SQLite schema lacks a profile column.
PROFILE_TAGS_FILE = HERMES_HOME / "profile_session_tags.json"
|
||||
|
||||
|
||||
def get_active_profile() -> str:
    """Get the currently active profile name.

    Resolution order:
      1. ``active_profile`` key in ``<HERMES_HOME>/config.yaml``, if set;
      2. ``HERMES_PROFILE`` environment variable;
      3. the literal ``"default"``.

    Returns:
        The active profile name. Never raises: unreadable or invalid
        YAML falls through to the environment.
    """
    config_path = HERMES_HOME / "config.yaml"
    if config_path.exists():
        try:
            import yaml  # local import: PyYAML may not be installed

            with open(config_path) as f:
                cfg = yaml.safe_load(f) or {}
            # Fix: only trust the config when the key is actually set.
            # Previously a config file without "active_profile" returned
            # "default" even when HERMES_PROFILE was exported.
            profile = cfg.get("active_profile")
            if profile:
                return profile
        except Exception:
            pass  # fall back to the environment

    # Check environment
    return os.getenv("HERMES_PROFILE", "default")
|
||||
|
||||
|
||||
def _load_tags() -> Dict[str, str]:
    """Return the persisted session-id -> profile-name mapping.

    A missing or corrupt tags file is treated as an empty mapping so
    callers never have to deal with I/O errors themselves.
    """
    try:
        with open(PROFILE_TAGS_FILE) as fh:
            return json.load(fh)
    except Exception:
        # Covers the file-not-found case the original tested explicitly.
        return {}
|
||||
|
||||
|
||||
def _save_tags(tags: Dict[str, str]):
    """Persist the session-id -> profile-name mapping as indented JSON."""
    parent_dir = PROFILE_TAGS_FILE.parent
    parent_dir.mkdir(parents=True, exist_ok=True)
    payload = json.dumps(tags, indent=2)
    with open(PROFILE_TAGS_FILE, "w") as fh:
        fh.write(payload)
|
||||
|
||||
|
||||
def tag_session(session_id: str, profile: Optional[str] = None) -> str:
    """Tag a session with its originating profile.

    Args:
        session_id: Identifier of the session to tag.
        profile: Owning profile; defaults to the active profile.

    Returns the profile name used.
    """
    owner = profile if profile is not None else get_active_profile()

    # Record in the JSON sidecar first — it is the authoritative store.
    tags = _load_tags()
    tags[session_id] = owner
    _save_tags(tags)

    # Best-effort mirror into the SQLite session store.
    _tag_session_in_db(session_id, owner)

    return owner
|
||||
|
||||
|
||||
def _tag_session_in_db(session_id: str, profile: str):
    """Add profile tag to SQLite session store (best effort).

    Lazily migrates the schema (adds a ``profile`` column) and stamps the
    given session row. All failures — missing DB, divergent schema,
    locked file — are swallowed because the JSON sidecar remains the
    source of truth.
    """
    if not SESSIONS_DB.exists():
        return

    try:
        conn = sqlite3.connect(str(SESSIONS_DB))
    except Exception:
        return

    try:
        cursor = conn.cursor()

        # Check if sessions table has profile column; add it on first use.
        cursor.execute("PRAGMA table_info(sessions)")
        columns = [row[1] for row in cursor.fetchall()]
        if "profile" not in columns:
            cursor.execute(
                "ALTER TABLE sessions ADD COLUMN profile TEXT DEFAULT 'default'"
            )

        # Update the session's profile
        cursor.execute(
            "UPDATE sessions SET profile = ? WHERE session_id = ?",
            (profile, session_id),
        )
        conn.commit()
    except Exception:
        pass  # SQLite might not be available or schema differs
    finally:
        # Fix: the original leaked the connection whenever a statement
        # raised before reaching conn.close().
        conn.close()
|
||||
|
||||
|
||||
def get_session_profile(session_id: str) -> Optional[str]:
    """Get the profile that owns a session, or None if untagged.

    The JSON sidecar is consulted first; the SQLite store is a fallback.
    """
    # Check JSON tags first
    tags = _load_tags()
    if session_id in tags:
        return tags[session_id]

    # Check SQLite
    if SESSIONS_DB.exists():
        conn = None
        try:
            conn = sqlite3.connect(str(SESSIONS_DB))
            cursor = conn.cursor()
            cursor.execute(
                "SELECT profile FROM sessions WHERE session_id = ?",
                (session_id,),
            )
            row = cursor.fetchone()
            if row:
                return row[0]
        except Exception:
            pass  # missing table/column: treat as untagged
        finally:
            # Fix: close even when the query raises — the original only
            # closed on the success path and leaked the connection on error.
            if conn is not None:
                conn.close()

    return None
|
||||
|
||||
|
||||
def get_profile_sessions(
    profile: Optional[str] = None,
    limit: int = 100,
) -> List[Dict[str, Any]]:
    """
    Get sessions belonging to a specific profile.

    Args:
        profile: Profile to filter by; defaults to the active profile.
        limit: Maximum number of sessions to return.

    Returns list of session dicts (possibly empty).
    """
    if profile is None:
        profile = get_active_profile()

    sessions: List[Dict[str, Any]] = []

    # Session ids tagged with this profile in the JSON sidecar; used as a
    # fallback filter when the DB schema lacks a profile column.
    tags = _load_tags()
    tagged_sessions = [sid for sid, p in tags.items() if p == profile]

    if SESSIONS_DB.exists():
        conn = None
        try:
            conn = sqlite3.connect(str(SESSIONS_DB))
            conn.row_factory = sqlite3.Row
            cursor = conn.cursor()

            try:
                # Preferred path: schema has a profile column.
                cursor.execute(
                    "SELECT * FROM sessions WHERE profile = ? ORDER BY updated_at DESC LIMIT ?",
                    (profile, limit),
                )
                sessions.extend(dict(row) for row in cursor.fetchall())
            except Exception:
                # Fallback: filter by tagged session IDs from the sidecar.
                if tagged_sessions:
                    wanted = tagged_sessions[:limit]
                    placeholders = ",".join("?" * len(wanted))
                    cursor.execute(
                        f"SELECT * FROM sessions WHERE session_id IN ({placeholders}) ORDER BY updated_at DESC LIMIT ?",
                        (*wanted, limit),
                    )
                    sessions.extend(dict(row) for row in cursor.fetchall())
        except Exception:
            pass  # DB unreadable: return whatever we gathered
        finally:
            # Fix: the original leaked the connection when a query raised
            # before reaching conn.close().
            if conn is not None:
                conn.close()

    return sessions[:limit]
|
||||
|
||||
|
||||
def filter_sessions_by_profile(
    sessions: List[Dict[str, Any]],
    profile: Optional[str] = None,
) -> List[Dict[str, Any]]:
    """Filter a list of sessions to only those belonging to a profile.

    Sessions with no recorded profile are kept — presumably for backward
    compatibility with sessions created before tagging existed.
    NOTE(review): confirm this leniency is intended; it lets untagged
    sessions appear in every profile's view.
    """
    target = profile if profile is not None else get_active_profile()
    tags = _load_tags()

    kept: List[Dict[str, Any]] = []
    for entry in sessions:
        sid = entry.get("session_id") or entry.get("id")
        if not sid:
            continue

        # Sidecar tag first, then the SQLite store.
        owner = tags.get(sid)
        if owner is None:
            owner = get_session_profile(sid)

        if owner is None or owner == target:
            kept.append(entry)

    return kept
|
||||
|
||||
|
||||
def get_profile_stats() -> Dict[str, Any]:
    """Summarize how tagged sessions are distributed across profiles."""
    tags = _load_tags()

    # Tally sessions per profile by hand (no Counter import at file level).
    counts: Dict[str, int] = {}
    for owner in tags.values():
        counts[owner] = counts.get(owner, 0) + 1

    return {
        "total_tagged_sessions": len(tags),
        "profiles": list(counts),
        "profile_counts": counts,
        "active_profile": get_active_profile(),
    }
|
||||
|
||||
|
||||
def audit_untagged_sessions() -> List[str]:
    """Find sessions without a profile tag.

    Returns the session ids present in the SQLite store but absent from
    the JSON sidecar; an unreadable/missing DB yields an empty list.
    """
    if not SESSIONS_DB.exists():
        return []

    conn = None
    try:
        conn = sqlite3.connect(str(SESSIONS_DB))
        cursor = conn.cursor()
        # Get all session IDs
        cursor.execute("SELECT session_id FROM sessions")
        all_sessions = {row[0] for row in cursor.fetchall()}
    except Exception:
        return []  # no table / corrupt DB: nothing to audit
    finally:
        # Fix: close even when the query raises — the original leaked the
        # connection on the error path.
        if conn is not None:
            conn.close()

    # Tagged sessions live in the JSON sidecar.
    tagged = set(_load_tags())

    # Return untagged
    return list(all_sessions - tagged)
|
||||
224
gateway/config_validator.py
Normal file
224
gateway/config_validator.py
Normal file
@@ -0,0 +1,224 @@
|
||||
"""
|
||||
Gateway Config Validator & Fallback Fix — #892.
|
||||
|
||||
Validates gateway configuration and provides sensible defaults
|
||||
for missing keys to prevent fallback chain breaks.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
from typing import Dict, Any, List, Optional
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class ConfigIssue:
    """A configuration issue found during validation."""
    key: str       # config key (or "provider.<name>" for provider issues)
    severity: str  # error, warning, info
    message: str   # human-readable description of the problem
    fix: str       # suggested remediation text
|
||||
|
||||
|
||||
@dataclass
class ConfigValidation:
    """Result of config validation."""
    valid: bool  # True when no error-severity issues were found
    issues: List[ConfigIssue] = field(default_factory=list)  # all issues found
    warnings: int = 0  # count of warning-severity issues
    errors: int = 0    # count of error-severity issues
|
||||
|
||||
|
||||
# Required keys and their defaults. Each spec drives validate_config()
# (issue reporting) and apply_defaults() (fill-in):
#   required: missing value is treated as fatal when True
#   default:  applied when neither config nor environment provides a value
#   severity: issue level reported when the value is missing
#   message / fix: text surfaced in the validation report
REQUIRED_KEYS = {
    "OPENROUTER_API_KEY": {
        "required": False,
        "default": "",
        "severity": "warning",
        "message": "OPENROUTER_API_KEY not set - fallback chain may break",
        "fix": "Set OPENROUTER_API_KEY in .env for OpenRouter provider",
    },
    "API_SERVER_KEY": {
        "required": False,
        "default": "",
        "severity": "warning",
        "message": "API_SERVER_KEY not configured",
        "fix": "Set API_SERVER_KEY in .env for API server auth",
    },
    "GITEA_TOKEN": {
        "required": False,
        "default": "",
        "severity": "info",
        "message": "GITEA_TOKEN not set - Gitea features disabled",
        "fix": "Set GITEA_TOKEN in .env for Gitea integration",
    },
}
|
||||
|
||||
# Config validation rules applied by validate_config() to keys that are
# actually present. "validate" returns True for acceptable values;
# "message" may interpolate the offending value via {v}.
VALIDATION_RULES = [
    {
        "key": "idle_minutes",
        "validate": lambda v: isinstance(v, (int, float)) and v > 0,
        "message": "Invalid idle_minutes={v} - must be > 0",
        "fix": "Set idle_minutes to positive integer (default: 30)",
    },
    {
        "key": "max_skills_discord",
        "validate": lambda v: isinstance(v, int) and v <= 100,
        "message": "Discord slash command limit reached ({v}/100) - skills not registered",
        "fix": "Reduce skills or paginate registration",
    },
]
|
||||
|
||||
|
||||
def validate_config(config: Dict[str, Any]) -> ConfigValidation:
    """
    Validate gateway configuration.

    Well-known keys (REQUIRED_KEYS) are resolved from the config, then
    the environment, then the spec default; VALIDATION_RULES are applied
    to values that are present.

    Args:
        config: Configuration dictionary

    Returns:
        ConfigValidation with issues found; ``valid`` is True when no
        error-severity issues exist.
    """
    issues = []

    # Check required keys. The original had two branches that appended
    # byte-identical issues; merged here into one equivalent condition:
    # report a missing value when the key is required, or when its
    # severity is non-error (info/warning advisories).
    for key, spec in REQUIRED_KEYS.items():
        value = config.get(key) or os.environ.get(key) or spec["default"]
        if not value and (spec["required"] or spec["severity"] != "error"):
            issues.append(ConfigIssue(
                key=key,
                severity=spec["severity"],
                message=spec["message"],
                fix=spec["fix"],
            ))

    # Check validation rules for keys that are present.
    for rule in VALIDATION_RULES:
        value = config.get(rule["key"])
        if value is not None and not rule["validate"](value):
            issues.append(ConfigIssue(
                key=rule["key"],
                severity="error",
                message=rule["message"].format(v=value),
                fix=rule["fix"],
            ))

    errors = sum(1 for i in issues if i.severity == "error")
    warnings = sum(1 for i in issues if i.severity == "warning")

    return ConfigValidation(
        valid=errors == 0,
        issues=issues,
        warnings=warnings,
        errors=errors,
    )
|
||||
|
||||
|
||||
def apply_defaults(config: Dict[str, Any]) -> Dict[str, Any]:
    """
    Apply default values for missing config keys.

    The input mapping is not mutated; a shallow copy is returned with
    environment values (or spec defaults) filled in for empty keys.

    Args:
        config: Configuration dictionary

    Returns:
        Config with defaults applied
    """
    result = dict(config)

    for key, spec in REQUIRED_KEYS.items():
        if result.get(key):
            continue  # already has a truthy value
        fallback = os.environ.get(key) or spec["default"]
        if fallback:
            result[key] = fallback
            logger.debug("Applied default for %s", key)

    # idle_minutes must be a positive number; fall back to 30 otherwise.
    idle = result.get("idle_minutes")
    if not idle or idle <= 0:
        result["idle_minutes"] = 30
        logger.debug("Applied default idle_minutes=30")

    return result
|
||||
|
||||
|
||||
def fix_discord_skill_limit(skills: List[str], max_skills: int = 95) -> List[str]:
    """
    Fix Discord slash command limit by reducing skills.

    Args:
        skills: List of skill names
        max_skills: Maximum skills to register (default 95, leaving room for built-ins)

    Returns:
        The original list when it already fits; otherwise the
        alphabetically first ``max_skills`` entries.
    """
    overflow = len(skills) - max_skills
    if overflow <= 0:
        return skills

    logger.warning(
        "Discord skill limit: %d skills exceeds %d limit, truncating",
        len(skills), max_skills
    )

    # Alphabetical order decides which skills survive the cut.
    return sorted(skills)[:max_skills]
|
||||
|
||||
|
||||
def validate_provider_config(provider: str, config: Dict[str, Any]) -> Optional[ConfigIssue]:
    """
    Validate provider-specific configuration.

    Args:
        provider: Provider name
        config: Provider config

    Returns:
        ConfigIssue if invalid, None if valid.  (Return annotation fixed
        to Optional[ConfigIssue]; the original claimed a non-optional
        ConfigIssue while returning None for valid configs.)
    """
    if provider == "local-llama.cpp":
        # llama.cpp needs either a local model file or a server URL.
        if not config.get("model_path") and not config.get("base_url"):
            return ConfigIssue(
                key=f"provider.{provider}",
                severity="warning",
                message=f"{provider} provider not configured - fallback fails",
                fix=f"Configure {provider} model_path or base_url, or remove from provider list",
            )

    return None
|
||||
|
||||
|
||||
def format_validation_report(validation: ConfigValidation) -> str:
    """Render validation results as a human-readable text report."""
    rule = "=" * 50
    status = "VALID" if validation.valid else "INVALID"

    lines = [
        rule,
        "GATEWAY CONFIG VALIDATION",
        rule,
        "",
        f"Status: {status}",
        f"Errors: {validation.errors}",
        f"Warnings: {validation.warnings}",
        "",
    ]

    if validation.issues:
        lines.append("Issues:")
        severity_icons = {"error": "❌", "warning": "⚠️"}
        for issue in validation.issues:
            icon = severity_icons.get(issue.severity, "ℹ️")
            lines.append(f"  {icon} [{issue.key}] {issue.message}")
            lines.append(f"     Fix: {issue.fix}")
        lines.append("")

    return "\n".join(lines)
|
||||
@@ -1,76 +0,0 @@
|
||||
"""Tests for profile session isolation (#891)."""
|
||||
|
||||
import sys
|
||||
import json
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
# Make the repo root importable when running this file directly.
sys.path.insert(0, str(Path(__file__).parent.parent))


# Override paths for testing
import agent.profile_isolation as iso_mod
# Redirect the tag sidecar into a throwaway temp dir so tests never touch
# real ~/.hermes state. NOTE(review): SESSIONS_DB is not redirected —
# assumes the host's state.db (if any) does not affect these assertions.
_test_dir = Path(tempfile.mkdtemp())
iso_mod.PROFILE_TAGS_FILE = _test_dir / "tags.json"
|
||||
|
||||
|
||||
def test_tag_session():
    """Session gets tagged with profile."""
    recorded = iso_mod.tag_session("sess-1", "sprint")
    assert recorded == "sprint"
    looked_up = iso_mod.get_session_profile("sess-1")
    assert looked_up == "sprint"
|
||||
|
||||
|
||||
def test_default_profile():
    """Sessions tagged with default when no profile specified."""
    assert iso_mod.tag_session("sess-2") is not None
|
||||
|
||||
|
||||
def test_get_session_profile():
    """Can retrieve profile for tagged session."""
    iso_mod.tag_session("sess-3", "fenrir")
    owner = iso_mod.get_session_profile("sess-3")
    assert owner == "fenrir"
|
||||
|
||||
|
||||
def test_untagged_returns_none():
    """Untagged session returns None."""
    owner = iso_mod.get_session_profile("nonexistent")
    assert owner is None
|
||||
|
||||
|
||||
def test_profile_stats():
    """Stats reflect tagged sessions."""
    for sid, prof in (("s1", "default"), ("s2", "sprint"), ("s3", "sprint")):
        iso_mod.tag_session(sid, prof)
    stats = iso_mod.get_profile_stats()
    assert stats["total_tagged_sessions"] >= 3
    assert "sprint" in stats["profile_counts"]
|
||||
|
||||
|
||||
def test_filter_sessions():
    """Filter returns only matching profile sessions."""
    for sid, prof in (("filter-1", "alpha"), ("filter-2", "beta"), ("filter-3", "alpha")):
        iso_mod.tag_session(sid, prof)

    candidates = [{"session_id": f"filter-{i}"} for i in (1, 2, 3)]

    filtered = iso_mod.filter_sessions_by_profile(candidates, "alpha")
    ids = [entry["session_id"] for entry in filtered]
    assert "filter-1" in ids
    assert "filter-3" in ids
    assert "filter-2" not in ids
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Simple runner so the file works without pytest.
    suite = [
        test_tag_session,
        test_default_profile,
        test_get_session_profile,
        test_untagged_returns_none,
        test_profile_stats,
        test_filter_sessions,
    ]
    for case in suite:
        print(f"Running {case.__name__}...")
        case()
        print("  PASS")
    print("\nAll tests passed.")
|
||||
Reference in New Issue
Block a user