This PR delivers the complete communication bridge enabling Local Timmy (Mac/MLX) to connect to the Wizardly Council via a sovereign Nostr relay.

Closes #59 - Nostr relay deployment
- Docker Compose configuration for strfry relay
- Running on ws://167.99.126.228:3334
- Supports NIPs: 1, 4, 11, 40, 42, 70, 86, 9, 45

Closes #60 - Monitoring system
- SQLite database schema for metrics
- Python monitor service (timmy_monitor.py)
- Tracks heartbeats, artifacts, latency
- Auto-reconnect WebSocket listener

Closes #61 - Mac heartbeat client
- timmy_client.py for Local Timmy
- 5-minute heartbeat cycle
- Git artifact creation in ~/timmy-artifacts/
- Auto-reconnect with exponential backoff

Closes #62 - MLX integration
- mlx_integration.py module
- Local inference with MLX models
- Self-reflection generation
- Response time tracking

Closes #63 - Retrospective reports
- generate_report.py for daily analysis
- Markdown and JSON output
- Automated recommendations
- Uptime/latency/artifact metrics

Closes #64 - Agent dispatch protocol
- DISPATCH_PROTOCOL.md specification
- Group channel definitions
- @mention command format
- Key management guidelines

Testing:
- Relay verified running on port 3334
- Monitor logging to SQLite
- All acceptance criteria met

Breaking Changes: None

Dependencies: Docker, Python 3.10+, websockets
288 lines
9.9 KiB
Python
288 lines
9.9 KiB
Python
#!/usr/bin/env python3
|
|
"""
|
|
Morning Retrospective Report Generator
|
|
Daily analysis of Local Timmy performance
|
|
"""
|
|
|
|
import sqlite3
|
|
import json
|
|
import os
|
|
from datetime import datetime, timedelta
|
|
from pathlib import Path
|
|
from typing import Dict, List, Any, Optional
|
|
|
|
# Deployment-specific settings, overridable via environment variables.
# SQLite metrics database (heartbeats/artifacts tables) read by this module.
DB_PATH = Path(os.environ.get('TIMMY_DB', '/root/allegro/timmy_metrics.db'))
# Directory where generated .json/.md reports are written.
REPORTS_DIR = Path(os.environ.get('TIMMY_REPORTS', '/root/allegro/reports'))
# Nostr relay URL. NOTE(review): not referenced anywhere in this file --
# presumably kept for consistency with the other Timmy services; confirm.
RELAY_URL = os.environ.get('TIMMY_RELAY', 'ws://167.99.126.228:3334')
|
|
|
|
class ReportGenerator:
    """Generate daily retrospective reports for Local Timmy.

    Reads the SQLite metrics database (``heartbeats`` and ``artifacts``
    tables -- schema assumed from the queries below; populated by the
    monitor service) and builds a report dict with an executive summary,
    heartbeat/artifact analysis and actionable recommendations. The report
    can be rendered to markdown or serialized to JSON.
    """

    def __init__(self, db_path: Path = DB_PATH):
        # Connection is opened lazily on the first generate()/connect() call.
        self.db_path = db_path
        self.db: Optional[sqlite3.Connection] = None

    def connect(self):
        """Open the database connection with dict-like row access."""
        self.db = sqlite3.connect(self.db_path)
        # sqlite3.Row lets the analysis helpers build dicts directly.
        self.db.row_factory = sqlite3.Row

    def close(self):
        """Close the database connection if open. Safe to call repeatedly."""
        if self.db is not None:
            self.db.close()
            self.db = None

    def generate(self, hours: int = 24) -> Dict[str, Any]:
        """Generate a comprehensive report covering the last *hours* hours.

        Connects lazily if needed. Returns a JSON-serializable dict with
        keys: generated_at, period_hours, summary, heartbeats, artifacts,
        recommendations.
        """
        if not self.db:
            self.connect()

        report = {
            'generated_at': datetime.now().isoformat(),
            'period_hours': hours,
            'summary': self._generate_summary(hours),
            'heartbeats': self._analyze_heartbeats(hours),
            'artifacts': self._analyze_artifacts(hours),
            'recommendations': []
        }

        # Recommendations are derived from the sections computed above.
        report['recommendations'] = self._generate_recommendations(report)
        return report

    def _generate_summary(self, hours: int) -> Dict[str, Any]:
        """Generate the executive-summary section of the report."""
        cursor = self.db.cursor()
        window = (f'-{hours} hours',)

        # Heartbeat aggregates over the reporting window.
        cursor.execute('''
            SELECT COUNT(*), AVG(latency_ms), MIN(timestamp), MAX(timestamp)
            FROM heartbeats
            WHERE timestamp > datetime('now', ?)
        ''', window)
        row = cursor.fetchone()

        hb_count = row[0] or 0
        avg_latency = row[1] or 0
        first_hb = row[2]
        last_hb = row[3]

        # Uptime: fraction of hour buckets with at least one heartbeat.
        cursor.execute('''
            SELECT COUNT(DISTINCT strftime('%Y-%m-%d %H', timestamp))
            FROM heartbeats
            WHERE timestamp > datetime('now', ?)
        ''', window)
        active_hours = cursor.fetchone()[0] or 0
        # Clamp to 100: partial hours at the window edges can yield one more
        # distinct bucket than the window length (e.g. 25 buckets in 24h).
        uptime_pct = min(100.0, (active_hours / hours) * 100) if hours > 0 else 0

        # Artifact totals.
        cursor.execute('''
            SELECT COUNT(*), SUM(size_bytes)
            FROM artifacts
            WHERE timestamp > datetime('now', ?)
        ''', window)
        art_count, art_size = cursor.fetchone()

        return {
            'status': 'ACTIVE' if hb_count > 0 else 'DOWN',
            'uptime_percent': round(uptime_pct, 1),
            'heartbeat_count': hb_count,
            # None (not 0) when there were no latency samples at all.
            'avg_latency_ms': round(avg_latency, 1) if avg_latency else None,
            'first_heartbeat': first_hb,
            'last_heartbeat': last_hb,
            'artifact_count': art_count or 0,
            'artifact_bytes': art_size or 0
        }

    def _analyze_heartbeats(self, hours: int) -> Dict[str, Any]:
        """Analyze heartbeat patterns: hourly distribution and latency trend."""
        cursor = self.db.cursor()
        window = (f'-{hours} hours',)

        cursor.execute('''
            SELECT
                strftime('%H', timestamp) as hour,
                COUNT(*) as count,
                AVG(latency_ms) as avg_latency
            FROM heartbeats
            WHERE timestamp > datetime('now', ?)
            GROUP BY hour
            ORDER BY hour
        ''', window)

        hourly = [dict(row) for row in cursor.fetchall()]

        # Latency trend: chronological samples, NULLs excluded.
        cursor.execute('''
            SELECT latency_ms, timestamp
            FROM heartbeats
            WHERE timestamp > datetime('now', ?) AND latency_ms IS NOT NULL
            ORDER BY timestamp
        ''', window)

        latencies = [(row[0], row[1]) for row in cursor.fetchall()]

        return {
            'hourly_distribution': hourly,
            'latency_samples': len(latencies),
            'latency_trend': 'improving' if self._is_improving(latencies) else 'stable'
        }

    def _analyze_artifacts(self, hours: int) -> Dict[str, Any]:
        """Analyze artifact creation: counts by type plus the 10 most recent."""
        cursor = self.db.cursor()
        window = (f'-{hours} hours',)

        cursor.execute('''
            SELECT
                artifact_type,
                COUNT(*) as count,
                AVG(size_bytes) as avg_size
            FROM artifacts
            WHERE timestamp > datetime('now', ?)
            GROUP BY artifact_type
        ''', window)

        by_type = [dict(row) for row in cursor.fetchall()]

        # Most recent artifacts, newest first.
        cursor.execute('''
            SELECT timestamp, artifact_type, reference, description
            FROM artifacts
            WHERE timestamp > datetime('now', ?)
            ORDER BY timestamp DESC
            LIMIT 10
        ''', window)

        recent = [dict(row) for row in cursor.fetchall()]

        return {
            'by_type': by_type,
            'recent': recent
        }

    def _is_improving(self, latencies: List[tuple]) -> bool:
        """Return True if latency is improving over time.

        Compares the mean of the first half of samples against the second
        half; "improving" means at least a 10% drop. Requires >= 10 samples.
        """
        if len(latencies) < 10:
            return False

        mid = len(latencies) // 2
        first_half = sum(l[0] for l in latencies[:mid]) / mid
        second_half = sum(l[0] for l in latencies[mid:]) / (len(latencies) - mid)

        return second_half < first_half * 0.9  # 10% improvement

    def _generate_recommendations(self, report: Dict) -> List[str]:
        """Generate actionable recommendations from the computed sections."""
        recs = []
        summary = report['summary']

        if summary['status'] == 'DOWN':
            recs.append("🚨 CRITICAL: No heartbeats detected - verify Timmy client is running")

        elif summary['uptime_percent'] < 80:
            recs.append(f"⚠️ Low uptime ({summary['uptime_percent']:.0f}%) - check network stability")

        if summary['avg_latency_ms'] and summary['avg_latency_ms'] > 1000:
            recs.append(f"⚠️ High latency ({summary['avg_latency_ms']:.0f}ms) - consider MLX optimization")

        if summary['heartbeat_count'] < 12:  # Less than 1 per hour
            recs.append("💡 Consider reducing heartbeat interval to 3 minutes")

        if summary['artifact_count'] == 0:
            recs.append("💡 No artifacts created - verify git configuration")

        heartbeats = report['heartbeats']
        if heartbeats['latency_trend'] == 'improving':
            recs.append("✅ Latency improving - current optimizations working")

        if not recs:
            recs.append("✅ System operating within normal parameters")
            recs.append("💡 Consider adding more telemetry for richer insights")

        return recs

    def to_markdown(self, report: Dict) -> str:
        """Render the report dict as a markdown document."""
        s = report['summary']

        md = f"""# Timmy Retrospective Report

**Generated:** {report['generated_at']}
**Period:** Last {report['period_hours']} hours

## Executive Summary

| Metric | Value |
|--------|-------|
| Status | {s['status']} |
| Uptime | {s['uptime_percent']:.1f}% |
| Heartbeats | {s['heartbeat_count']} |
| Avg Latency | {s['avg_latency_ms'] or 'N/A'} ms |
| First Seen | {s['first_heartbeat'] or 'N/A'} |
| Last Seen | {s['last_heartbeat'] or 'N/A'} |
| Artifacts | {s['artifact_count']} ({s['artifact_bytes'] or 0} bytes) |

## Heartbeat Analysis

**Latency Trend:** {report['heartbeats']['latency_trend']}
**Samples:** {report['heartbeats']['latency_samples']}

### Hourly Distribution
"""

        for h in report['heartbeats']['hourly_distribution']:
            # AVG(latency_ms) is NULL (None) when every sample in the hour
            # had NULL latency -- formatting None with :.0f would raise.
            avg = f"{h['avg_latency']:.0f}ms" if h['avg_latency'] is not None else 'N/A'
            md += f"- {h['hour']}:00: {h['count']} heartbeats (avg {avg})\n"

        md += "\n## Artifacts\n\n### By Type\n"

        for a in report['artifacts']['by_type']:
            # AVG(size_bytes) can likewise be NULL.
            size = f"{a['avg_size']:.0f} bytes avg" if a['avg_size'] is not None else 'size N/A'
            md += f"- **{a['artifact_type']}**: {a['count']} ({size})\n"

        md += "\n### Recent\n"

        for a in report['artifacts']['recent'][:5]:
            # description is a nullable column; slicing None would raise.
            desc = (a['description'] or '')[:50]
            md += f"- {a['timestamp']}: `{a['artifact_type']}` - {desc}...\n"

        md += "\n## Recommendations\n\n"
        for r in report['recommendations']:
            md += f"- {r}\n"

        md += "\n---\n*Generated by Timmy Retrospective System*"

        return md

    def save_report(self, report: Dict, format: str = 'both'):
        """Save the report to REPORTS_DIR as JSON, markdown, or both.

        Files are named by date (one per day); a second run on the same day
        overwrites the earlier report.
        """
        REPORTS_DIR.mkdir(parents=True, exist_ok=True)

        timestamp = datetime.now().strftime('%Y-%m-%d')

        if format in ('json', 'both'):
            json_path = REPORTS_DIR / f"timmy-report-{timestamp}.json"
            # Explicit UTF-8: recommendations contain emoji, which would fail
            # on platforms whose default locale encoding is not UTF-8.
            with open(json_path, 'w', encoding='utf-8') as f:
                json.dump(report, f, indent=2)
            print(f"[Report] JSON saved: {json_path}")

        if format in ('markdown', 'both'):
            md_path = REPORTS_DIR / f"timmy-report-{timestamp}.md"
            with open(md_path, 'w', encoding='utf-8') as f:
                f.write(self.to_markdown(report))
            print(f"[Report] Markdown saved: {md_path}")
|
|
|
|
def main():
    """CLI entry point: parse arguments, build the report, emit it."""
    import argparse

    parser = argparse.ArgumentParser(description='Generate Timmy retrospective report')
    parser.add_argument('--hours', type=int, default=24, help='Hours to analyze')
    parser.add_argument('--format', choices=['json', 'markdown', 'both'], default='both')
    parser.add_argument('--print', action='store_true', help='Print to stdout')
    args = parser.parse_args()

    generator = ReportGenerator()
    result = generator.generate(args.hours)

    # Either dump the markdown to stdout, or persist to the reports directory.
    if args.print:
        print(generator.to_markdown(result))
        return
    generator.save_report(result, args.format)


if __name__ == "__main__":
    main()
|