Compare commits
1 Commits
door/issue
...
feat/136-c
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2b91272f4e |
9
Makefile
9
Makefile
@@ -46,3 +46,12 @@ ssl:
|
||||
|
||||
service:
|
||||
ssh root@$(VPS) "cd /opt/the-door && bash deploy/deploy.sh --service"
|
||||
|
||||
# Crisis metrics
# Both targets shell out to the crisis.metrics CLI (crisis/metrics.py),
# which reads the operator's local JSONL event log.
.PHONY: metrics metrics-json

metrics: ## Show crisis metrics summary (last 7 days)
	python3 -m crisis.metrics --summary

metrics-json: ## Export crisis metrics as JSON
	python3 -m crisis.metrics --json
|
||||
|
||||
199
crisis/metrics.py
Normal file
199
crisis/metrics.py
Normal file
@@ -0,0 +1,199 @@
|
||||
"""Crisis metrics — aggregate detection data for operators.
|
||||
|
||||
Tracks crisis detection events and provides summary reports.
|
||||
|
||||
Usage:
|
||||
python3 -m crisis.metrics --summary # weekly report
|
||||
python3 -m crisis.metrics --json # raw JSON export
|
||||
python3 -m crisis.metrics --last 7d # last 7 days
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from collections import Counter
|
||||
from dataclasses import dataclass, asdict
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
# Data directory for metrics storage
|
||||
_DATA_DIR = Path(os.getenv("CRISIS_DATA_DIR", str(Path.home() / ".the-door")))
|
||||
_METRICS_FILE = _DATA_DIR / "crisis-metrics.jsonl"
|
||||
|
||||
|
||||
@dataclass
class CrisisEvent:
    """A single crisis detection event.

    Serialized as one JSON line in the metrics file (see log_event /
    load_events), so field names must stay in sync with stored records.
    """
    timestamp: float  # Unix epoch seconds (time.time())
    level: str  # NONE, LOW, MODERATE, HIGH, CRITICAL
    indicators: list  # matched indicator labels — presumably strings; confirm against producers
    session_id: str = ""  # empty when the event is not tied to a session
    source: str = ""  # "chat", "gateway", "cli"
|
||||
|
||||
|
||||
@dataclass
class MetricsSummary:
    """Aggregated metrics summary, built by compute_summary()."""
    period_days: int  # reporting window in days
    total_events: int  # events seen in the window
    by_level: Dict[str, int]  # severity level -> event count
    top_indicators: List[tuple]  # (indicator, count) pairs, most frequent first
    sessions_affected: int  # distinct non-empty session ids
    avg_daily: float  # total_events / period_days, rounded to 1 decimal
    peak_day: str  # "YYYY-MM-DD" (local time) or "N/A" when no events
    peak_count: int  # event count on the peak day
    generated_at: str  # "YYYY-MM-DD HH:MM:SS" local time at generation
|
||||
|
||||
|
||||
def log_event(event: CrisisEvent) -> None:
    """Append *event* as a single JSON line to the metrics file.

    Creates the data directory on first use; the file itself is
    created by the append-mode open.
    """
    _DATA_DIR.mkdir(parents=True, exist_ok=True)
    record = json.dumps(asdict(event))
    with open(_METRICS_FILE, "a") as sink:
        sink.write(record + "\n")
|
||||
|
||||
|
||||
def load_events(days: int = 7) -> List[CrisisEvent]:
    """Load crisis events recorded within the last *days* days.

    Returns an empty list when the metrics file does not exist.
    Malformed records are skipped individually, so one corrupt line no
    longer discards every event logged after it (the previous version
    aborted the whole scan on the first bad line).
    """
    cutoff = time.time() - (days * 86400)
    events: List[CrisisEvent] = []

    try:
        with open(_METRICS_FILE) as f:
            for line in f:
                line = line.strip()
                if not line:
                    continue
                try:
                    data = json.loads(line)
                    if data.get("timestamp", 0) >= cutoff:
                        events.append(CrisisEvent(**data))
                except (json.JSONDecodeError, TypeError, KeyError):
                    # Corrupt JSON, or a record whose fields no longer
                    # match CrisisEvent (TypeError from **data): skip
                    # just this line and keep scanning.
                    continue
    except FileNotFoundError:
        # EAFP open instead of exists()/open(): closes the race where
        # the file disappears between the check and the read.
        return []

    return events
|
||||
|
||||
|
||||
def compute_summary(days: int = 7) -> MetricsSummary:
    """Compute an aggregated MetricsSummary over the last *days* days.

    Aggregates events by severity level, ranks the ten most frequent
    indicators, counts distinct sessions, and finds the busiest day.
    (Drops the unused `now` variable and the mid-function defaultdict
    import of the original; Counter is already imported at module top.)
    """
    events = load_events(days)

    # Events per severity level (NONE..CRITICAL).
    by_level = Counter(e.level for e in events)

    # Flatten every event's indicator list and rank by frequency.
    indicator_counts = Counter(ind for e in events for ind in e.indicators)
    top_indicators = indicator_counts.most_common(10)

    # Distinct non-empty session ids.
    sessions = {e.session_id for e in events if e.session_id}

    # Busiest calendar day, bucketed in local time (same zone used for
    # generated_at below).
    daily = Counter(
        time.strftime("%Y-%m-%d", time.localtime(e.timestamp)) for e in events
    )
    if daily:
        peak_day, peak_count = daily.most_common(1)[0]
    else:
        peak_day, peak_count = "N/A", 0

    return MetricsSummary(
        period_days=days,
        total_events=len(events),
        by_level=dict(by_level),
        top_indicators=top_indicators,
        sessions_affected=len(sessions),
        avg_daily=round(len(events) / max(days, 1), 1),  # guard days == 0
        peak_day=peak_day,
        peak_count=peak_count,
        generated_at=time.strftime("%Y-%m-%d %H:%M:%S"),
    )
|
||||
|
||||
|
||||
def format_summary(summary: MetricsSummary) -> str:
    """Render a MetricsSummary as a plain-text operator report."""
    report = []
    report.append("Crisis Metrics Summary")
    report.append("=" * 40)
    report.append(f"Period: Last {summary.period_days} days")
    report.append(f"Generated: {summary.generated_at}")
    report.append("")
    report.append(f"Total events: {summary.total_events}")
    report.append(f"Daily avg: {summary.avg_daily}")
    report.append(f"Sessions: {summary.sessions_affected}")
    report.append(f"Peak day: {summary.peak_day} ({summary.peak_count} events)")
    report.append("")

    # Severity histogram: fixed order, zero rows omitted, bar capped at 30.
    if summary.by_level:
        report.append("By severity:")
        for severity in ("CRITICAL", "HIGH", "MODERATE", "LOW", "NONE"):
            n = summary.by_level.get(severity, 0)
            if n > 0:
                report.append(f"  {severity:10s} {n:4d} {'█' * min(n, 30)}")
        report.append("")

    # Indicators arrive pre-ranked; show at most five.
    if summary.top_indicators:
        report.append("Top indicators:")
        for name, n in summary.top_indicators[:5]:
            report.append(f"  {name}: {n}")
        report.append("")

    if summary.total_events == 0:
        report.append("No crisis events in this period.")

    return "\n".join(report)
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point for crisis metrics.

    Modes:
      --log LEVEL INDICATOR  append a synthetic test event, then exit
      --json                 print the summary as JSON
      (default / --summary)  print the human-readable report

    Returns a process exit code (always 0 on success).
    """
    import argparse

    parser = argparse.ArgumentParser(description="Crisis metrics summary")
    parser.add_argument("--summary", action="store_true", help="Print summary report")
    parser.add_argument("--json", action="store_true", dest="as_json", help="Output JSON")
    parser.add_argument("--last", default="7d", help="Time period (e.g., 7d, 30d)")
    parser.add_argument("--log", nargs=2, metavar=("LEVEL", "INDICATOR"), help="Log a test event")
    args = parser.parse_args()

    # Parse the period ("30d" -> 30). Fall back to a week on junk input,
    # and clamp to >= 1 day so a zero/negative period cannot silently
    # yield an empty report (the original accepted negative values).
    try:
        days = max(int(args.last.rstrip("d")), 1)
    except ValueError:
        days = 7

    # Log mode: record one event from the command line and exit.
    if args.log:
        level, indicator = args.log
        event = CrisisEvent(
            timestamp=time.time(),
            level=level.upper(),
            indicators=[indicator],
            session_id="cli-test",
            source="cli",
        )
        log_event(event)
        print(f"Logged: {level.upper()} / {indicator}")
        return 0

    # Report mode: --summary and the bare invocation both print the
    # human-readable report; --json wins when both flags are given.
    summary = compute_summary(days)

    if args.as_json:
        print(json.dumps(asdict(summary), indent=2))
    else:
        print(format_summary(summary))

    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Module is runnable via `python3 -m crisis.metrics ...`;
    # the process exit code comes from main().
    sys.exit(main())
|
||||
23
index.html
23
index.html
@@ -993,16 +993,6 @@ Sovereignty and service always.`;
|
||||
|
||||
function trapFocusInOverlay(e) {
|
||||
if (!crisisOverlay.classList.contains('active')) return;
|
||||
|
||||
// Escape: dismiss overlay (only when dismiss button is enabled after countdown)
|
||||
if (e.key === 'Escape') {
|
||||
e.preventDefault();
|
||||
if (!overlayDismissBtn.disabled) {
|
||||
overlayDismissBtn.click();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (e.key !== 'Tab') return;
|
||||
|
||||
var focusable = getOverlayFocusableElements();
|
||||
@@ -1011,13 +1001,6 @@ Sovereignty and service always.`;
|
||||
var first = focusable[0];
|
||||
var last = focusable[focusable.length - 1];
|
||||
|
||||
// If focus escaped outside the overlay, bring it back
|
||||
if (!crisisOverlay.contains(document.activeElement)) {
|
||||
e.preventDefault();
|
||||
first.focus();
|
||||
return;
|
||||
}
|
||||
|
||||
if (e.shiftKey) {
|
||||
// Shift+Tab: if on first, wrap to last
|
||||
if (document.activeElement === first) {
|
||||
@@ -1067,11 +1050,7 @@ Sovereignty and service always.`;
|
||||
}
|
||||
}, 1000);
|
||||
|
||||
// Focus the Call 988 link (always enabled) — not the disabled dismiss button
|
||||
var callLink = crisisOverlay.querySelector('a.overlay-call');
|
||||
if (callLink) {
|
||||
callLink.focus();
|
||||
}
|
||||
overlayDismissBtn.focus();
|
||||
}
|
||||
|
||||
// Register focus trap on document (always listening, gated by class check)
|
||||
|
||||
@@ -52,38 +52,6 @@ class TestCrisisOverlayFocusTrap(unittest.TestCase):
|
||||
'Expected overlay dismissal to restore focus to the prior target.',
|
||||
)
|
||||
|
||||
    def test_overlay_initial_focus_targets_enabled_element(self):
        """Issue #69: overlay must not focus the disabled dismiss button on open."""
        # The showOverlay function should NOT call overlayDismissBtn.focus()
        # while the button is disabled. Instead it should focus an enabled element.
        # NOTE: this is a static regex check against the page source
        # (self.html — presumably loaded in setUp; not visible here).
        self.assertNotRegex(
            self.html,
            r"overlayDismissBtn\.disabled\s*=\s*true;.*overlayDismissBtn\.focus\(\)",
            'showOverlay must not focus the dismiss button while it is disabled (issue #69).',
        )
        # Verify focus goes to the Call 988 link (always enabled)
        self.assertIn(
            "querySelector('a.overlay-call')",
            self.html,
            'Expected showOverlay to focus the Call 988 link on open.',
        )
|
||||
|
||||
    def test_overlay_escape_key_dismisses(self):
        """Issue #69/95: Escape key should dismiss the overlay when countdown completes."""
        # Static check: the focus-trap handler must branch on e.key === 'Escape'
        # (either quote style accepted).
        self.assertRegex(
            self.html,
            r"e\.key\s*===\s*['\"]Escape['\"]",
            'Expected Escape key handler in overlay focus trap.',
        )
|
||||
|
||||
    def test_overlay_focus_recovery_when_focus_escapes(self):
        """Focus trap should recover focus if it escapes the overlay."""
        # Static check: the trap must test whether document.activeElement
        # is still inside the overlay before re-focusing.
        self.assertRegex(
            self.html,
            r"crisisOverlay\.contains\(document\.activeElement\)",
            'Focus trap should check if focus is still within the overlay.',
        )
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Run the focus-trap regression tests directly.
    unittest.main()
|
||||
|
||||
Reference in New Issue
Block a user