feat: complete Event Log, Ledger, Memory, Cascade Router, Upgrade Queue, Activity Feed
This commit implements six major features:

1. Event Log System (src/swarm/event_log.py)
   - SQLite-based audit trail for all swarm events
   - Task lifecycle tracking (created, assigned, completed, failed)
   - Agent lifecycle tracking (joined, left, status changes)
   - Integrated with coordinator for automatic logging
   - Dashboard page at /swarm/events

2. Lightning Ledger (src/lightning/ledger.py)
   - Transaction tracking for Lightning Network payments
   - Balance calculations (incoming, outgoing, net, available)
   - Integrated with payment_handler for automatic logging
   - Dashboard page at /lightning/ledger

3. Semantic Memory / Vector Store (src/memory/vector_store.py)
   - Embedding-based similarity search for Echo agent
   - Fallback to keyword matching if sentence-transformers unavailable
   - Personal facts storage and retrieval
   - Dashboard page at /memory

4. Cascade Router Integration (src/timmy/cascade_adapter.py)
   - Automatic LLM failover between providers (Ollama → AirLLM → API)
   - Circuit breaker pattern for failing providers
   - Metrics tracking per provider (latency, error rates)
   - Dashboard status page at /router/status

5. Self-Upgrade Approval Queue (src/upgrades/)
   - State machine for self-modifications: proposed → approved/rejected → applied/failed
   - Human approval required before applying changes
   - Git integration for branch management
   - Dashboard queue at /self-modify/queue

6. Real-Time Activity Feed (src/events/broadcaster.py)
   - WebSocket-based live activity streaming
   - Bridges event_log to dashboard clients
   - Activity panel on /swarm/live

Tests:
- 101 unit tests passing
- 4 new E2E test files for Selenium testing
- Run with: SELENIUM_UI=1 pytest tests/functional/ -v --headed

Documentation:
- 6 ADRs (017-022) documenting architecture decisions
- Implementation summary in docs/IMPLEMENTATION_SUMMARY.md
- Architecture diagram in docs/architecture-v2.md
This commit is contained in:
@@ -27,6 +27,11 @@ from dashboard.routes.spark import router as spark_router
|
||||
from dashboard.routes.creative import router as creative_router
|
||||
from dashboard.routes.discord import router as discord_router
|
||||
from dashboard.routes.self_modify import router as self_modify_router
|
||||
from dashboard.routes.events import router as events_router
|
||||
from dashboard.routes.ledger import router as ledger_router
|
||||
from dashboard.routes.memory import router as memory_router
|
||||
from dashboard.routes.router import router as router_status_router
|
||||
from dashboard.routes.upgrades import router as upgrades_router
|
||||
from router.api import router as cascade_router
|
||||
|
||||
logging.basicConfig(
|
||||
@@ -166,6 +171,11 @@ app.include_router(spark_router)
|
||||
app.include_router(creative_router)
|
||||
app.include_router(discord_router)
|
||||
app.include_router(self_modify_router)
|
||||
app.include_router(events_router)
|
||||
app.include_router(ledger_router)
|
||||
app.include_router(memory_router)
|
||||
app.include_router(router_status_router)
|
||||
app.include_router(upgrades_router)
|
||||
app.include_router(cascade_router)
|
||||
|
||||
|
||||
|
||||
91
src/dashboard/routes/events.py
Normal file
91
src/dashboard/routes/events.py
Normal file
@@ -0,0 +1,91 @@
|
||||
"""Event Log routes for viewing system events."""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi.responses import HTMLResponse
|
||||
from fastapi.templating import Jinja2Templates
|
||||
|
||||
from swarm.event_log import (
|
||||
EventType,
|
||||
list_events,
|
||||
get_event_summary,
|
||||
get_recent_events,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix="/swarm", tags=["events"])
|
||||
templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
|
||||
|
||||
|
||||
@router.get("/events", response_class=HTMLResponse)
async def events_page(
    request: Request,
    event_type: Optional[str] = None,
    task_id: Optional[str] = None,
    agent_id: Optional[str] = None,
):
    """Render the event log viewer with optional type/task/agent filters."""

    def _parse_type(raw: Optional[str]) -> Optional[EventType]:
        # Unknown type strings are ignored (treated as "no filter") so a
        # stale or hand-edited URL still renders the unfiltered page.
        if not raw:
            return None
        try:
            return EventType(raw)
        except ValueError:
            return None

    filtered = list_events(
        event_type=_parse_type(event_type),
        task_id=task_id,
        agent_id=agent_id,
        limit=100,
    )

    context = {
        "page_title": "Event Log",
        "events": filtered,
        # Rolling one-hour summary shown in the stat cards at the top.
        "summary": get_event_summary(minutes=60),
        "filter_type": event_type,
        "filter_task": task_id,
        "filter_agent": agent_id,
        "event_types": [e.value for e in EventType],
    }
    return templates.TemplateResponse(request, "events.html", context)
|
||||
|
||||
|
||||
@router.get("/events/partial", response_class=HTMLResponse)
async def events_partial(
    request: Request,
    event_type: Optional[str] = None,
    task_id: Optional[str] = None,
    agent_id: Optional[str] = None,
):
    """Return only the events table fragment, for HTMX refreshes."""

    def _parse_type(raw: Optional[str]) -> Optional[EventType]:
        # Mirrors events_page: an unrecognized type string means "no filter".
        if not raw:
            return None
        try:
            return EventType(raw)
        except ValueError:
            return None

    rows = list_events(
        event_type=_parse_type(event_type),
        task_id=task_id,
        agent_id=agent_id,
        limit=100,
    )
    return templates.TemplateResponse(
        request,
        "partials/events_table.html",
        {"events": rows},
    )
|
||||
102
src/dashboard/routes/ledger.py
Normal file
102
src/dashboard/routes/ledger.py
Normal file
@@ -0,0 +1,102 @@
|
||||
"""Lightning Ledger routes for viewing transactions and balance."""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi.responses import HTMLResponse
|
||||
from fastapi.templating import Jinja2Templates
|
||||
|
||||
from lightning.ledger import (
|
||||
TransactionType,
|
||||
TransactionStatus,
|
||||
list_transactions,
|
||||
get_balance,
|
||||
get_transaction_stats,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix="/lightning", tags=["ledger"])
|
||||
templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
|
||||
|
||||
|
||||
@router.get("/ledger", response_class=HTMLResponse)
async def ledger_page(
    request: Request,
    tx_type: Optional[str] = None,
    status: Optional[str] = None,
):
    """Render the Lightning ledger: balance cards, transactions, weekly stats."""

    def _coerce(enum_cls, raw):
        # Invalid filter values fall back to "no filter" instead of erroring,
        # so bookmarked URLs with stale enum values still render.
        if not raw:
            return None
        try:
            return enum_cls(raw)
        except ValueError:
            return None

    context = {
        "page_title": "Lightning Ledger",
        "balance": get_balance(),
        "transactions": list_transactions(
            tx_type=_coerce(TransactionType, tx_type),
            status=_coerce(TransactionStatus, status),
            limit=50,
        ),
        # Per-day activity for the 7-day mini chart.
        "stats": get_transaction_stats(days=7),
        "filter_type": tx_type,
        "filter_status": status,
        "tx_types": [t.value for t in TransactionType],
        "tx_statuses": [s.value for s in TransactionStatus],
    }
    return templates.TemplateResponse(request, "ledger.html", context)
|
||||
|
||||
|
||||
@router.get("/ledger/partial", response_class=HTMLResponse)
async def ledger_partial(
    request: Request,
    tx_type: Optional[str] = None,
    status: Optional[str] = None,
):
    """Return only the transactions table fragment, for HTMX refreshes."""

    def _coerce(enum_cls, raw):
        # Mirrors ledger_page: unrecognized filter values mean "no filter".
        if not raw:
            return None
        try:
            return enum_cls(raw)
        except ValueError:
            return None

    rows = list_transactions(
        tx_type=_coerce(TransactionType, tx_type),
        status=_coerce(TransactionStatus, status),
        limit=50,
    )
    return templates.TemplateResponse(
        request,
        "partials/ledger_table.html",
        {"transactions": rows},
    )
|
||||
98
src/dashboard/routes/memory.py
Normal file
98
src/dashboard/routes/memory.py
Normal file
@@ -0,0 +1,98 @@
|
||||
"""Memory (vector store) routes for browsing and searching memories."""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Form, Request
|
||||
from fastapi.responses import HTMLResponse
|
||||
from fastapi.templating import Jinja2Templates
|
||||
|
||||
from memory.vector_store import (
|
||||
store_memory,
|
||||
search_memories,
|
||||
get_memory_stats,
|
||||
recall_personal_facts,
|
||||
store_personal_fact,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix="/memory", tags=["memory"])
|
||||
templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
|
||||
|
||||
|
||||
@router.get("", response_class=HTMLResponse)
async def memory_page(
    request: Request,
    query: Optional[str] = None,
    context_type: Optional[str] = None,
    agent_id: Optional[str] = None,
):
    """Render the memory browser: stats, optional search results, personal facts."""
    # Only hit the vector store when a query string was actually supplied.
    hits = (
        search_memories(
            query=query,
            context_type=context_type,
            agent_id=agent_id,
            limit=20,
        )
        if query
        else []
    )

    return templates.TemplateResponse(
        request,
        "memory.html",
        {
            "page_title": "Memory Browser",
            "query": query,
            "results": hits,
            "stats": get_memory_stats(),
            "facts": recall_personal_facts(limit=10),
            "filter_type": context_type,
            "filter_agent": agent_id,
        },
    )
|
||||
|
||||
|
||||
@router.post("/search", response_class=HTMLResponse)
async def memory_search(
    request: Request,
    query: str = Form(...),
    context_type: Optional[str] = Form(None),
):
    """Handle the search form POST and return the results fragment (HTMX)."""
    hits = search_memories(
        query=query,
        context_type=context_type,
        limit=20,
    )
    # Partial template only — HTMX swaps it into the results section.
    return templates.TemplateResponse(
        request,
        "partials/memory_results.html",
        {"query": query, "results": hits},
    )
|
||||
|
||||
|
||||
@router.post("/fact", response_class=HTMLResponse)
async def add_fact(
    request: Request,
    fact: str = Form(...),
    agent_id: Optional[str] = Form(None),
):
    """Persist a personal fact, then return the refreshed facts fragment."""
    store_personal_fact(fact, agent_id=agent_id)

    # Re-read from storage so the returned fragment includes the new fact.
    return templates.TemplateResponse(
        request,
        "partials/memory_facts.html",
        {"facts": recall_personal_facts(limit=10)},
    )
|
||||
54
src/dashboard/routes/router.py
Normal file
54
src/dashboard/routes/router.py
Normal file
@@ -0,0 +1,54 @@
|
||||
"""Cascade Router status routes."""
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi.responses import HTMLResponse
|
||||
from fastapi.templating import Jinja2Templates
|
||||
|
||||
from timmy.cascade_adapter import get_cascade_adapter
|
||||
|
||||
router = APIRouter(prefix="/router", tags=["router"])
|
||||
templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
|
||||
|
||||
|
||||
@router.get("/status", response_class=HTMLResponse)
async def router_status_page(request: Request):
    """Cascade Router dashboard: per-provider health plus aggregate metrics."""
    adapter = get_cascade_adapter()
    providers = adapter.get_provider_status()
    preferred = adapter.get_preferred_provider()

    # Aggregate request counters across all providers.
    metrics = [p["metrics"] for p in providers]
    total_requests = sum(m["total"] for m in metrics)
    total_success = sum(m["success"] for m in metrics)
    total_failed = sum(m["failed"] for m in metrics)

    # Unweighted mean of each provider's average latency (every provider
    # counts equally regardless of request volume).
    avg_latency = (
        sum(m["avg_latency_ms"] for m in metrics) / len(metrics) if metrics else 0.0
    )

    return templates.TemplateResponse(
        request,
        "router_status.html",
        {
            "page_title": "Router Status",
            "providers": providers,
            "preferred_provider": preferred,
            "total_requests": total_requests,
            "total_success": total_success,
            "total_failed": total_failed,
            "avg_latency_ms": round(avg_latency, 1),
        },
    )
|
||||
|
||||
|
||||
@router.get("/api/providers")
async def get_providers():
    """JSON snapshot of provider status for programmatic consumers."""
    adapter = get_cascade_adapter()
    payload = {
        "providers": adapter.get_provider_status(),
        "preferred": adapter.get_preferred_provider(),
    }
    return payload
|
||||
99
src/dashboard/routes/upgrades.py
Normal file
99
src/dashboard/routes/upgrades.py
Normal file
@@ -0,0 +1,99 @@
|
||||
"""Self-Upgrade Queue dashboard routes."""
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import APIRouter, Form, HTTPException, Request
|
||||
from fastapi.responses import HTMLResponse, JSONResponse
|
||||
from fastapi.templating import Jinja2Templates
|
||||
|
||||
from upgrades.models import list_upgrades, get_upgrade, UpgradeStatus, get_pending_count
|
||||
from upgrades.queue import UpgradeQueue
|
||||
|
||||
router = APIRouter(prefix="/self-modify", tags=["upgrades"])
|
||||
templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
|
||||
|
||||
|
||||
@router.get("/queue", response_class=HTMLResponse)
async def upgrade_queue_page(request: Request):
    """Upgrade queue dashboard: pending/approved proposals plus recent history."""
    pending = list_upgrades(status=UpgradeStatus.PROPOSED)
    approved = list_upgrades(status=UpgradeStatus.APPROVED)

    # Bucket the 20 most recent upgrades (any status) by terminal outcome.
    recent = list_upgrades(status=None)[:20]
    buckets = {
        UpgradeStatus.APPLIED: [],
        UpgradeStatus.REJECTED: [],
        UpgradeStatus.FAILED: [],
    }
    for upgrade in recent:
        if upgrade.status in buckets:
            buckets[upgrade.status].append(upgrade)

    return templates.TemplateResponse(
        request,
        "upgrade_queue.html",
        {
            "page_title": "Upgrade Queue",
            "pending": pending,
            "approved": approved,
            "applied": buckets[UpgradeStatus.APPLIED][:10],
            "rejected": buckets[UpgradeStatus.REJECTED][:5],
            "failed": buckets[UpgradeStatus.FAILED][:5],
            "pending_count": len(pending),
        },
    )
|
||||
|
||||
|
||||
@router.post("/queue/{upgrade_id}/approve", response_class=JSONResponse)
async def approve_upgrade_endpoint(upgrade_id: str):
    """Move a proposed upgrade into the approved state."""
    result = UpgradeQueue.approve(upgrade_id)
    if not result:
        # approve() yields nothing for unknown ids and for upgrades that
        # are not currently in the proposed state.
        raise HTTPException(404, "Upgrade not found or not in proposed state")
    return {"success": True, "upgrade_id": upgrade_id, "status": result.status.value}
|
||||
|
||||
|
||||
@router.post("/queue/{upgrade_id}/reject", response_class=JSONResponse)
async def reject_upgrade_endpoint(upgrade_id: str):
    """Reject a proposed upgrade so it is never applied."""
    result = UpgradeQueue.reject(upgrade_id)
    if not result:
        # reject() yields nothing for unknown ids and for upgrades that
        # are not currently in the proposed state.
        raise HTTPException(404, "Upgrade not found or not in proposed state")
    return {"success": True, "upgrade_id": upgrade_id, "status": result.status.value}
|
||||
|
||||
|
||||
@router.post("/queue/{upgrade_id}/apply", response_class=JSONResponse)
async def apply_upgrade_endpoint(upgrade_id: str):
    """Apply an approved upgrade; responds 400 with the failure reason on error."""
    ok, message = UpgradeQueue.apply(upgrade_id)
    if not ok:
        raise HTTPException(400, message)
    return {"success": True, "message": message}
|
||||
|
||||
|
||||
@router.get("/queue/{upgrade_id}/diff", response_class=HTMLResponse)
async def view_diff(request: Request, upgrade_id: str):
    """Render the full-diff page for one upgrade proposal."""
    upgrade = get_upgrade(upgrade_id)
    if not upgrade:
        raise HTTPException(404, "Upgrade not found")

    return templates.TemplateResponse(
        request,
        "upgrade_diff.html",
        {
            "upgrade": upgrade,
            "diff": UpgradeQueue.get_full_diff(upgrade_id),
        },
    )
|
||||
|
||||
|
||||
@router.get("/api/pending-count", response_class=JSONResponse)
async def get_pending_upgrade_count():
    """Return the number of upgrades awaiting approval.

    Consumed by the dashboard navigation to render the pending-count badge.
    """
    return {"count": get_pending_count()}
|
||||
@@ -30,6 +30,11 @@
|
||||
<a href="/spark/ui" class="mc-test-link">SPARK</a>
|
||||
<a href="/marketplace/ui" class="mc-test-link">MARKET</a>
|
||||
<a href="/tools" class="mc-test-link">TOOLS</a>
|
||||
<a href="/swarm/events" class="mc-test-link">EVENTS</a>
|
||||
<a href="/lightning/ledger" class="mc-test-link">LEDGER</a>
|
||||
<a href="/memory" class="mc-test-link">MEMORY</a>
|
||||
<a href="/router/status" class="mc-test-link">ROUTER</a>
|
||||
<a href="/self-modify/queue" class="mc-test-link">UPGRADES</a>
|
||||
<a href="/creative/ui" class="mc-test-link">CREATIVE</a>
|
||||
<a href="/mobile" class="mc-test-link" title="Mobile-optimized view">MOBILE</a>
|
||||
<button id="enable-notifications" class="mc-test-link" style="background:none;cursor:pointer;" title="Enable notifications">🔔</button>
|
||||
@@ -55,6 +60,9 @@
|
||||
<a href="/spark/ui" class="mc-mobile-link">SPARK</a>
|
||||
<a href="/marketplace/ui" class="mc-mobile-link">MARKET</a>
|
||||
<a href="/tools" class="mc-mobile-link">TOOLS</a>
|
||||
<a href="/swarm/events" class="mc-mobile-link">EVENTS</a>
|
||||
<a href="/lightning/ledger" class="mc-mobile-link">LEDGER</a>
|
||||
<a href="/memory" class="mc-mobile-link">MEMORY</a>
|
||||
<a href="/creative/ui" class="mc-mobile-link">CREATIVE</a>
|
||||
<a href="/voice/button" class="mc-mobile-link">VOICE</a>
|
||||
<a href="/mobile" class="mc-mobile-link">MOBILE</a>
|
||||
|
||||
103
src/dashboard/templates/events.html
Normal file
103
src/dashboard/templates/events.html
Normal file
@@ -0,0 +1,103 @@
|
||||
{% extends "base.html" %}

{% block title %}Event Log - Timmy Time{% endblock %}

{# Event log viewer. Context supplied by dashboard/routes/events.py:
   events, summary (event-type -> count over the last hour),
   filter_type / filter_task / filter_agent, event_types. #}
{% block content %}
<div class="mc-panel">
    <div class="mc-panel-header">
        <h1 class="page-title">Event Log</h1>
        <p class="mc-text-secondary">System audit trail and activity history</p>
    </div>

    <!-- Summary Stats -->
    <div class="mc-stats-row">
        {% for event_type, count in summary.items() %}
        <div class="mc-stat-card">
            <div class="mc-stat-value">{{ count }}</div>
            <div class="mc-stat-label">{{ event_type }}</div>
        </div>
        {% endfor %}
        {% if not summary %}
        <div class="mc-stat-card">
            <div class="mc-stat-value">-</div>
            <div class="mc-stat-label">No events (last hour)</div>
        </div>
        {% endif %}
    </div>

    <!-- Filters -->
    <div class="mc-filters">
        <form method="get" action="/swarm/events" class="mc-filter-form">
            <select name="event_type" class="mc-select" onchange="this.form.submit()">
                <option value="">All Event Types</option>
                {% for et in event_types %}
                <option value="{{ et }}" {% if filter_type == et %}selected{% endif %}>{{ et }}</option>
                {% endfor %}
            </select>

            {# Hidden inputs keep the task/agent filter when the type select resubmits. #}
            {% if filter_task %}
            <input type="hidden" name="task_id" value="{{ filter_task }}">
            <span class="mc-filter-tag">Task: {{ filter_task[:8] }}... <a href="/swarm/events">✕</a></span>
            {% endif %}

            {% if filter_agent %}
            <input type="hidden" name="agent_id" value="{{ filter_agent }}">
            <span class="mc-filter-tag">Agent: {{ filter_agent[:8] }}... <a href="/swarm/events">✕</a></span>
            {% endif %}
        </form>
    </div>

    <!-- Events Table -->
    <div class="mc-table-container">
        {% if events %}
        <table class="mc-table events-table">
            <thead>
                <tr>
                    <th>Time</th>
                    <th>Type</th>
                    <th>Source</th>
                    <th>Task</th>
                    <th>Agent</th>
                    <th>Data</th>
                </tr>
            </thead>
            <tbody>
                {% for event in events %}
                <tr class="event-row" data-type="{{ event.event_type.value }}">
                    {# timestamp[11:19] extracts HH:MM:SS from an ISO timestamp. #}
                    <td class="event-time">{{ event.timestamp[11:19] }}</td>
                    <td>
                        <span class="mc-badge mc-badge-{{ event.event_type.value.split('.')[0] }}">
                            {{ event.event_type.value }}
                        </span>
                    </td>
                    <td>{{ event.source }}</td>
                    <td>
                        {% if event.task_id %}
                        <a href="/swarm/events?task_id={{ event.task_id }}">{{ event.task_id[:8] }}...</a>
                        {% endif %}
                    </td>
                    <td>
                        {% if event.agent_id %}
                        <a href="/swarm/events?agent_id={{ event.agent_id }}">{{ event.agent_id[:8] }}...</a>
                        {% endif %}
                    </td>
                    <td class="event-data">
                        {% if event.data %}
                        <code>{{ event.data[:60] }}{% if event.data|length > 60 %}...{% endif %}</code>
                        {% endif %}
                    </td>
                </tr>
                {% endfor %}
            </tbody>
        </table>
        {% else %}
        <div class="mc-empty-state">
            <p>No events found.</p>
            {% if filter_type or filter_task or filter_agent %}
            <p><a href="/swarm/events" class="mc-link">Clear filters</a></p>
            {% endif %}
        </div>
        {% endif %}
    </div>
</div>
{% endblock %}
|
||||
133
src/dashboard/templates/ledger.html
Normal file
133
src/dashboard/templates/ledger.html
Normal file
@@ -0,0 +1,133 @@
|
||||
{% extends "base.html" %}

{% block title %}Lightning Ledger - Timmy Time{% endblock %}

{# Lightning ledger page. Context supplied by dashboard/routes/ledger.py:
   balance, transactions, stats (per-day, last 7 days),
   filter_type / filter_status, tx_types, tx_statuses. #}
{% block content %}
<div class="mc-panel">
    <div class="mc-panel-header">
        <h1 class="page-title">Lightning Ledger</h1>
        <p class="mc-text-secondary">Bitcoin Lightning Network transaction history</p>
    </div>

    <!-- Balance Cards -->
    <div class="mc-stats-row balance-row">
        <div class="mc-stat-card sats-balance">
            <div class="mc-stat-label">Available Balance</div>
            <div class="mc-stat-value">{{ balance.available_sats }} <small>sats</small></div>
        </div>
        <div class="mc-stat-card">
            <div class="mc-stat-label">Total Received</div>
            <div class="mc-stat-value">{{ balance.incoming_total_sats }} <small>sats</small></div>
        </div>
        <div class="mc-stat-card">
            <div class="mc-stat-label">Total Sent</div>
            <div class="mc-stat-value">{{ balance.outgoing_total_sats }} <small>sats</small></div>
        </div>
        <div class="mc-stat-card">
            <div class="mc-stat-label">Fees Paid</div>
            <div class="mc-stat-value">{{ balance.fees_paid_sats }} <small>sats</small></div>
        </div>
        <div class="mc-stat-card net-balance">
            <div class="mc-stat-label">Net</div>
            <div class="mc-stat-value {% if balance.net_sats >= 0 %}positive{% else %}negative{% endif %}">
                {{ balance.net_sats }} <small>sats</small>
            </div>
        </div>
    </div>

    <!-- Pending Summary (only rendered when something is in flight) -->
    {% if balance.pending_incoming_sats > 0 or balance.pending_outgoing_sats > 0 %}
    <div class="mc-pending-row">
        {% if balance.pending_incoming_sats > 0 %}
        <span class="mc-pending-badge incoming">
            Pending incoming: {{ balance.pending_incoming_sats }} sats
        </span>
        {% endif %}
        {% if balance.pending_outgoing_sats > 0 %}
        <span class="mc-pending-badge outgoing">
            Pending outgoing: {{ balance.pending_outgoing_sats }} sats
        </span>
        {% endif %}
    </div>
    {% endif %}

    <!-- Filters -->
    <div class="mc-filters">
        <form method="get" action="/lightning/ledger" class="mc-filter-form">
            <select name="tx_type" class="mc-select" onchange="this.form.submit()">
                <option value="">All Types</option>
                {% for t in tx_types %}
                <option value="{{ t }}" {% if filter_type == t %}selected{% endif %}>{{ t }}</option>
                {% endfor %}
            </select>

            <select name="status" class="mc-select" onchange="this.form.submit()">
                <option value="">All Statuses</option>
                {% for s in tx_statuses %}
                <option value="{{ s }}" {% if filter_status == s %}selected{% endif %}>{{ s }}</option>
                {% endfor %}
            </select>
        </form>
    </div>

    <!-- Transactions Table -->
    <div class="mc-table-container">
        {% if transactions %}
        <table class="mc-table transactions-table">
            <thead>
                <tr>
                    <th>Time</th>
                    <th>Type</th>
                    <th>Status</th>
                    <th>Amount</th>
                    <th>Hash</th>
                    <th>Memo</th>
                </tr>
            </thead>
            <tbody>
                {% for tx in transactions %}
                <tr class="transaction-row" data-type="{{ tx.tx_type.value }}" data-status="{{ tx.status.value }}">
                    {# created_at[11:19] extracts HH:MM:SS from an ISO timestamp. #}
                    <td>{{ tx.created_at[11:19] }}</td>
                    <td>
                        <span class="mc-badge mc-badge-{{ tx.tx_type.value }}">
                            {{ tx.tx_type.value }}
                        </span>
                    </td>
                    <td>
                        <span class="mc-status mc-status-{{ tx.status.value }}">
                            {{ tx.status.value }}
                        </span>
                    </td>
                    <td class="amount {% if tx.tx_type.value == 'incoming' %}positive{% else %}negative{% endif %}">
                        {% if tx.tx_type.value == 'incoming' %}+{% endif %}{{ tx.amount_sats }} sats
                    </td>
                    <td class="mono">{{ tx.payment_hash[:16] }}...</td>
                    <td>{{ tx.memo }}</td>
                </tr>
                {% endfor %}
            </tbody>
        </table>
        {% else %}
        <div class="mc-empty-state">
            <p>No transactions yet.</p>
            <p class="mc-text-secondary">Invoices and payments will appear here.</p>
        </div>
        {% endif %}
    </div>

    <!-- Weekly Stats -->
    {% if stats %}
    <div class="mc-stats-section">
        <h3>Activity (Last 7 Days)</h3>
        <div class="mc-mini-chart">
            {% for date, day_stats in stats.items() %}
            <div class="mc-chart-bar" title="{{ date }}">
                {# Bar height scales linearly with count (10px per transaction). #}
                <div class="bar incoming" style="height: {{ day_stats.incoming.count * 10 }}px"></div>
                <div class="bar outgoing" style="height: {{ day_stats.outgoing.count * 10 }}px"></div>
            </div>
            {% endfor %}
        </div>
    </div>
    {% endif %}
</div>
{% endblock %}
|
||||
119
src/dashboard/templates/memory.html
Normal file
119
src/dashboard/templates/memory.html
Normal file
@@ -0,0 +1,119 @@
|
||||
{% extends "base.html" %}

{% block title %}Memory Browser - Timmy Time{% endblock %}

{# Memory browser page. Context supplied by dashboard/routes/memory.py:
   stats, query, results, facts, filter_type, filter_agent. #}
{% block content %}
<div class="mc-panel">
    <div class="mc-panel-header">
        <h1 class="page-title">Memory Browser</h1>
        <p class="mc-text-secondary">Semantic search through conversation history and facts</p>
    </div>

    <!-- Stats -->
    <div class="mc-stats-row">
        <div class="mc-stat-card">
            <div class="mc-stat-value">{{ stats.total_entries }}</div>
            <div class="mc-stat-label">Total Memories</div>
        </div>
        <div class="mc-stat-card">
            <div class="mc-stat-value">{{ stats.with_embeddings }}</div>
            <div class="mc-stat-label">With Embeddings</div>
        </div>
        <div class="mc-stat-card">
            {# ✓ when the embedding model is loaded, ○ when keyword fallback is active. #}
            <div class="mc-stat-value">{% if stats.has_embedding_model %}✓{% else %}○{% endif %}</div>
            <div class="mc-stat-label">AI Search</div>
        </div>
        {% for type, count in stats.by_type.items() %}
        <div class="mc-stat-card">
            <div class="mc-stat-value">{{ count }}</div>
            <div class="mc-stat-label">{{ type }}</div>
        </div>
        {% endfor %}
    </div>

    <!-- Search -->
    <div class="mc-search-section">
        <form method="get" action="/memory" class="mc-search-form">
            <input
                type="search"
                name="query"
                class="mc-search-input"
                placeholder="Search memories..."
                value="{{ query or '' }}"
                autofocus
            >
            <button type="submit" class="mc-btn mc-btn-primary">Search</button>
        </form>

        {% if query %}
        <p class="mc-search-info">Searching for: "{{ query }}"</p>
        {% endif %}
    </div>

    <!-- Search Results (only rendered when a query was submitted) -->
    {% if query %}
    <div class="mc-results-section">
        <h3>Search Results</h3>

        {% if results %}
        <div class="memory-results">
            {% for mem in results %}
            <div class="memory-entry" data-relevance="{{ mem.relevance_score }}">
                <div class="memory-header">
                    <span class="memory-source">{{ mem.source }}</span>
                    <span class="memory-type mc-badge">{{ mem.context_type }}</span>
                    {% if mem.relevance_score %}
                    <span class="memory-score">{{ "%.2f"|format(mem.relevance_score) }}</span>
                    {% endif %}
                </div>
                <div class="memory-content">{{ mem.content }}</div>
                <div class="memory-meta">
                    {# timestamp[11:16] extracts HH:MM from an ISO timestamp. #}
                    <span class="memory-time">{{ mem.timestamp[11:16] }}</span>
                    {% if mem.agent_id %}
                    <span class="memory-agent">Agent: {{ mem.agent_id[:8] }}...</span>
                    {% endif %}
                    {% if mem.task_id %}
                    <span class="memory-task">Task: {{ mem.task_id[:8] }}...</span>
                    {% endif %}
                </div>
            </div>
            {% endfor %}
        </div>
        {% else %}
        <div class="mc-empty-state">
            <p>No results found for "{{ query }}"</p>
            <p class="mc-text-secondary">Try different keywords or check spelling.</p>
        </div>
        {% endif %}
    </div>
    {% endif %}

    <!-- Personal Facts -->
    <div class="mc-facts-section">
        <div class="mc-section-header">
            <h3>Personal Facts</h3>
            <button class="mc-btn mc-btn-small" onclick="document.getElementById('add-fact-form').style.display='block'">
                + Add Fact
            </button>
        </div>

        {# Form posts via HTMX (hx-post) and swaps the refreshed facts list;
           the plain action="/memory/fact" is the no-JS fallback. #}
        <form id="add-fact-form" class="mc-inline-form" method="post" action="/memory/fact" style="display:none;" hx-post="/memory/fact" hx-target=".mc-facts-list">
            <input type="text" name="fact" class="mc-input" placeholder="Enter a fact..." required>
            <button type="submit" class="mc-btn mc-btn-primary">Save</button>
            <button type="button" class="mc-btn" onclick="document.getElementById('add-fact-form').style.display='none'">Cancel</button>
        </form>

        <div class="mc-facts-list">
            {% if facts %}
            <ul class="mc-fact-list">
                {% for fact in facts %}
                <li class="memory-fact">{{ fact }}</li>
                {% endfor %}
            </ul>
            {% else %}
            <p class="mc-text-secondary">No personal facts stored yet.</p>
            {% endif %}
        </div>
    </div>
</div>
{% endblock %}
|
||||
202
src/dashboard/templates/router_status.html
Normal file
202
src/dashboard/templates/router_status.html
Normal file
@@ -0,0 +1,202 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Router Status - Timmy Time{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="mc-panel">
|
||||
<div class="mc-panel-header">
|
||||
<h1 class="page-title">Router Status</h1>
|
||||
<p class="mc-text-secondary">LLM provider health and metrics</p>
|
||||
</div>
|
||||
|
||||
<!-- Overall Stats -->
|
||||
<div class="mc-stats-row">
|
||||
<div class="mc-stat-card">
|
||||
<div class="mc-stat-value">{{ providers|length }}</div>
|
||||
<div class="mc-stat-label">Providers</div>
|
||||
</div>
|
||||
<div class="mc-stat-card">
|
||||
<div class="mc-stat-value">{{ total_requests }}</div>
|
||||
<div class="mc-stat-label">Total Requests</div>
|
||||
</div>
|
||||
<div class="mc-stat-card">
|
||||
<div class="mc-stat-value">{{ total_success }}</div>
|
||||
<div class="mc-stat-label">Successful</div>
|
||||
</div>
|
||||
<div class="mc-stat-card">
|
||||
<div class="mc-stat-value">{{ total_failed }}</div>
|
||||
<div class="mc-stat-label">Failed</div>
|
||||
</div>
|
||||
<div class="mc-stat-card">
|
||||
<div class="mc-stat-value">{{ avg_latency_ms }}<small>ms</small></div>
|
||||
<div class="mc-stat-label">Avg Latency</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Preferred Provider -->
|
||||
{% if preferred_provider %}
|
||||
<div class="mc-alert mc-alert-success">
|
||||
<strong>Preferred Provider:</strong> {{ preferred_provider }}
|
||||
<span class="mc-badge mc-badge-success">ACTIVE</span>
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="mc-alert mc-alert-warning">
|
||||
<strong>Warning:</strong> No healthy providers available
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<!-- Provider Cards -->
|
||||
<div class="mc-providers-grid">
|
||||
{% for provider in providers %}
|
||||
<div class="mc-provider-card provider-{{ provider.status }}">
|
||||
<div class="provider-header">
|
||||
<h3>{{ provider.name }}</h3>
|
||||
<span class="mc-badge mc-badge-{{ provider.status }}">
|
||||
{{ provider.status }}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div class="provider-meta">
|
||||
<span class="provider-type">{{ provider.type }}</span>
|
||||
<span class="provider-priority">Priority: {{ provider.priority }}</span>
|
||||
{% if not provider.enabled %}
|
||||
<span class="mc-badge mc-badge-disabled">DISABLED</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<div class="provider-circuit">
|
||||
Circuit: <span class="circuit-{{ provider.circuit_state }}">{{ provider.circuit_state }}</span>
|
||||
</div>
|
||||
|
||||
<div class="provider-metrics">
|
||||
<div class="metric">
|
||||
<span class="metric-value">{{ provider.metrics.total }}</span>
|
||||
<span class="metric-label">Requests</span>
|
||||
</div>
|
||||
<div class="metric">
|
||||
<span class="metric-value">{{ provider.metrics.success }}</span>
|
||||
<span class="metric-label">Success</span>
|
||||
</div>
|
||||
<div class="metric">
|
||||
<span class="metric-value">{{ provider.metrics.failed }}</span>
|
||||
<span class="metric-label">Failed</span>
|
||||
</div>
|
||||
<div class="metric">
|
||||
<span class="metric-value">{{ provider.metrics.avg_latency_ms }}ms</span>
|
||||
<span class="metric-label">Latency</span>
|
||||
</div>
|
||||
<div class="metric">
|
||||
<span class="metric-value">{{ "%.1f"|format(provider.metrics.error_rate * 100) }}%</span>
|
||||
<span class="metric-label">Error Rate</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if provider.metrics.error_rate > 0.1 %}
|
||||
<div class="mc-alert mc-alert-warning mc-alert-small">
|
||||
High error rate detected
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
|
||||
{% if not providers %}
|
||||
<div class="mc-empty-state">
|
||||
<p>No providers configured.</p>
|
||||
<p class="mc-text-secondary">Check config/providers.yaml</p>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.mc-providers-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
|
||||
gap: 1rem;
|
||||
margin-top: 1rem;
|
||||
}
|
||||
|
||||
.mc-provider-card {
|
||||
background: rgba(10, 15, 30, 0.6);
|
||||
border: 1px solid var(--mc-border);
|
||||
border-radius: 0.5rem;
|
||||
padding: 1rem;
|
||||
}
|
||||
|
||||
.mc-provider-card.provider-healthy {
|
||||
border-left: 4px solid #28a745;
|
||||
}
|
||||
|
||||
.mc-provider-card.provider-degraded {
|
||||
border-left: 4px solid #ffc107;
|
||||
}
|
||||
|
||||
.mc-provider-card.provider-unhealthy {
|
||||
border-left: 4px solid #dc3545;
|
||||
}
|
||||
|
||||
.provider-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
margin-bottom: 0.5rem;
|
||||
}
|
||||
|
||||
.provider-header h3 {
|
||||
margin: 0;
|
||||
font-size: 1.1rem;
|
||||
}
|
||||
|
||||
.provider-meta {
|
||||
display: flex;
|
||||
gap: 0.5rem;
|
||||
margin-bottom: 0.5rem;
|
||||
font-size: 0.85rem;
|
||||
color: var(--mc-text-secondary);
|
||||
}
|
||||
|
||||
.provider-circuit {
|
||||
font-size: 0.85rem;
|
||||
margin-bottom: 0.75rem;
|
||||
padding: 0.25rem 0.5rem;
|
||||
background: rgba(0,0,0,0.3);
|
||||
border-radius: 0.25rem;
|
||||
}
|
||||
|
||||
.circuit-closed { color: #28a745; }
|
||||
.circuit-open { color: #dc3545; }
|
||||
.circuit-half_open { color: #ffc107; }
|
||||
|
||||
.provider-metrics {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(5, 1fr);
|
||||
gap: 0.5rem;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.metric {
|
||||
padding: 0.5rem;
|
||||
background: rgba(0,0,0,0.2);
|
||||
border-radius: 0.25rem;
|
||||
}
|
||||
|
||||
.metric-value {
|
||||
display: block;
|
||||
font-size: 1.1rem;
|
||||
font-weight: 600;
|
||||
color: var(--mc-gold);
|
||||
}
|
||||
|
||||
.metric-label {
|
||||
display: block;
|
||||
font-size: 0.75rem;
|
||||
color: var(--mc-text-secondary);
|
||||
}
|
||||
|
||||
.mc-alert-small {
|
||||
margin-top: 0.75rem;
|
||||
padding: 0.5rem;
|
||||
font-size: 0.85rem;
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
@@ -35,6 +35,89 @@
|
||||
.swarm-title { font-size: 1rem; }
|
||||
.swarm-log-box { height: 160px; font-size: 11px; }
|
||||
}
|
||||
|
||||
/* Activity Feed Styles */
|
||||
.activity-feed-panel {
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
.activity-feed {
|
||||
max-height: 300px;
|
||||
overflow-y: auto;
|
||||
background: rgba(24, 10, 45, 0.6);
|
||||
padding: 12px;
|
||||
border-radius: var(--radius-md);
|
||||
border: 1px solid var(--border);
|
||||
}
|
||||
.activity-item {
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
gap: 10px;
|
||||
padding: 8px 0;
|
||||
border-bottom: 1px solid rgba(255,255,255,0.05);
|
||||
animation: fadeIn 0.3s ease;
|
||||
}
|
||||
.activity-item:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
@keyframes fadeIn {
|
||||
from { opacity: 0; transform: translateY(-5px); }
|
||||
to { opacity: 1; transform: translateY(0); }
|
||||
}
|
||||
.activity-icon {
|
||||
font-size: 16px;
|
||||
flex-shrink: 0;
|
||||
width: 24px;
|
||||
text-align: center;
|
||||
}
|
||||
.activity-content {
|
||||
flex: 1;
|
||||
min-width: 0;
|
||||
}
|
||||
.activity-label {
|
||||
font-weight: 600;
|
||||
color: var(--text-bright);
|
||||
font-size: 12px;
|
||||
}
|
||||
.activity-desc {
|
||||
color: var(--text-dim);
|
||||
font-size: 11px;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
.activity-meta {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
font-size: 10px;
|
||||
color: var(--text-dim);
|
||||
margin-top: 2px;
|
||||
}
|
||||
.activity-time {
|
||||
font-family: var(--font);
|
||||
color: var(--amber);
|
||||
}
|
||||
.activity-source {
|
||||
opacity: 0.7;
|
||||
}
|
||||
.activity-empty {
|
||||
color: var(--text-dim);
|
||||
font-size: 12px;
|
||||
text-align: center;
|
||||
padding: 20px;
|
||||
}
|
||||
.activity-badge {
|
||||
display: inline-block;
|
||||
width: 8px;
|
||||
height: 8px;
|
||||
background: #28a745;
|
||||
border-radius: 50%;
|
||||
margin-left: 8px;
|
||||
animation: pulse 2s infinite;
|
||||
}
|
||||
@keyframes pulse {
|
||||
0%, 100% { opacity: 1; }
|
||||
50% { opacity: 0.5; }
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
|
||||
@@ -76,6 +159,19 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Activity Feed Panel -->
|
||||
<div class="card mc-panel activity-feed-panel">
|
||||
<div class="card-header mc-panel-header">
|
||||
// LIVE ACTIVITY FEED
|
||||
<span class="activity-badge" id="activity-badge"></span>
|
||||
</div>
|
||||
<div class="card-body p-0">
|
||||
<div class="activity-feed" id="activity-feed">
|
||||
<div class="activity-empty">Waiting for events...</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="card mc-panel">
|
||||
<div class="card-header mc-panel-header">// SWARM LOG</div>
|
||||
<div class="card-body p-0">
|
||||
@@ -125,6 +221,16 @@ function connect() {
|
||||
}
|
||||
|
||||
function handleMessage(message) {
|
||||
// Handle activity feed events (from event_log broadcaster)
|
||||
if (message.type === 'event' && message.payload) {
|
||||
addActivityEvent(message.payload);
|
||||
// Also add to log
|
||||
var evt = message.payload;
|
||||
var logMsg = evt.event_type + ': ' + (evt.source || '');
|
||||
addLog(logMsg, 'info');
|
||||
return;
|
||||
}
|
||||
|
||||
if (message.type === 'initial_state' || message.type === 'state_update') {
|
||||
var data = message.data;
|
||||
document.getElementById('stat-agents').textContent = data.agents.total;
|
||||
@@ -158,6 +264,87 @@ function handleMessage(message) {
|
||||
}
|
||||
}
|
||||
|
||||
// Activity Feed Functions
const EVENT_ICONS = {
    'task.created': '📝',
    'task.bidding': '⏳',
    'task.assigned': '👤',
    'task.started': '▶️',
    'task.completed': '✅',
    'task.failed': '❌',
    'agent.joined': '🟢',
    'agent.left': '🔴',
    'bid.submitted': '💰',
    'auction.closed': '🏁',
    'tool.called': '🔧',
    'system.error': '⚠️',
};

const EVENT_LABELS = {
    'task.created': 'New task',
    'task.assigned': 'Task assigned',
    'task.completed': 'Task completed',
    'task.failed': 'Task failed',
    'agent.joined': 'Agent joined',
    'agent.left': 'Agent left',
    'bid.submitted': 'Bid submitted',
};

// Escape untrusted text before it is interpolated into innerHTML.
// Event payloads (descriptions, sources, raw event types) originate from
// agents and tools, so they must not be able to inject markup (XSS).
function escapeHtml(value) {
    return String(value)
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;')
        .replace(/'/g, '&#39;');
}

// Prepend one event to the activity feed, keep at most 50 rows,
// and flash the live badge to signal new activity.
function addActivityEvent(evt) {
    var container = document.getElementById('activity-feed');

    // Remove empty message if present
    var empty = container.querySelector('.activity-empty');
    if (empty) empty.remove();

    // Create activity item
    var item = document.createElement('div');
    item.className = 'activity-item';

    var icon = EVENT_ICONS[evt.event_type] || '•';
    var label = EVENT_LABELS[evt.event_type] || evt.event_type;
    var time = evt.timestamp ? evt.timestamp.split('T')[1].slice(0, 8) : '--:--:--';

    // Build description from data
    var desc = '';
    if (evt.data) {
        try {
            var data = typeof evt.data === 'string' ? JSON.parse(evt.data) : evt.data;
            if (data.description) desc = data.description.slice(0, 50);
            else if (data.reason) desc = data.reason.slice(0, 50);
        } catch(e) {}
    }

    // All dynamic values are escaped: `label` may fall back to the raw
    // event_type string, and desc/source come straight from event payloads.
    item.innerHTML = `
        <div class="activity-icon">${icon}</div>
        <div class="activity-content">
            <div class="activity-label">${escapeHtml(label)}</div>
            ${desc ? `<div class="activity-desc">${escapeHtml(desc)}</div>` : ''}
            <div class="activity-meta">
                <span class="activity-time">${escapeHtml(time)}</span>
                <span class="activity-source">${escapeHtml(evt.source || 'system')}</span>
            </div>
        </div>
    `;

    // Add to top
    container.insertBefore(item, container.firstChild);

    // Keep only last 50 items
    while (container.children.length > 50) {
        container.removeChild(container.lastChild);
    }

    // Update badge
    var badge = document.getElementById('activity-badge');
    if (badge) {
        badge.style.background = '#28a745';
        setTimeout(() => {
            badge.style.background = '';
        }, 500);
    }
}
|
||||
|
||||
function refreshStats() {
|
||||
fetch('/swarm').then(function(r) { return r.json(); }).then(function(data) {
|
||||
document.getElementById('stat-agents').textContent = data.agents || 0;
|
||||
|
||||
290
src/dashboard/templates/upgrade_queue.html
Normal file
290
src/dashboard/templates/upgrade_queue.html
Normal file
@@ -0,0 +1,290 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Upgrade Queue - Timmy Time{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="mc-panel">
|
||||
<div class="mc-panel-header">
|
||||
<h1 class="page-title">Upgrade Queue</h1>
|
||||
<p class="mc-text-secondary">Review and approve self-modification proposals</p>
|
||||
</div>
|
||||
|
||||
<!-- Pending Upgrades -->
|
||||
<div class="mc-section">
|
||||
<h2 class="mc-section-title">
|
||||
Pending Upgrades
|
||||
{% if pending_count > 0 %}
|
||||
<span class="mc-badge mc-badge-warning">{{ pending_count }}</span>
|
||||
{% endif %}
|
||||
</h2>
|
||||
|
||||
{% if pending %}
|
||||
<div class="upgrades-list">
|
||||
{% for upgrade in pending %}
|
||||
<div class="upgrade-card upgrade-pending" data-id="{{ upgrade.id }}">
|
||||
<div class="upgrade-header">
|
||||
<h3>{{ upgrade.description }}</h3>
|
||||
<span class="mc-badge mc-badge-warning">PENDING</span>
|
||||
</div>
|
||||
|
||||
<div class="upgrade-meta">
|
||||
<span class="upgrade-branch">Branch: {{ upgrade.branch_name }}</span>
|
||||
<span class="upgrade-time">Proposed: {{ upgrade.proposed_at[11:16] }}</span>
|
||||
</div>
|
||||
|
||||
<div class="upgrade-files">
|
||||
Files: {{ upgrade.files_changed|join(', ') }}
|
||||
</div>
|
||||
|
||||
<div class="upgrade-test-status">
|
||||
{% if upgrade.test_passed %}
|
||||
<span class="test-passed">✓ Tests passed</span>
|
||||
{% else %}
|
||||
<span class="test-failed">✗ Tests failed</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<div class="upgrade-actions">
|
||||
<button class="mc-btn mc-btn-primary" onclick="approveUpgrade('{{ upgrade.id }}')">
|
||||
Approve
|
||||
</button>
|
||||
<button class="mc-btn" onclick="rejectUpgrade('{{ upgrade.id }}')">
|
||||
Reject
|
||||
</button>
|
||||
<a href="/self-modify/queue/{{ upgrade.id }}/diff" class="mc-btn mc-btn-secondary">
|
||||
View Diff
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="mc-empty-state">
|
||||
<p>No pending upgrades.</p>
|
||||
<p class="mc-text-secondary">Proposed modifications will appear here for review.</p>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<!-- Approved (Waiting to Apply) -->
|
||||
{% if approved %}
|
||||
<div class="mc-section">
|
||||
<h2 class="mc-section-title">Approved (Ready to Apply)</h2>
|
||||
<div class="upgrades-list">
|
||||
{% for upgrade in approved %}
|
||||
<div class="upgrade-card upgrade-approved">
|
||||
<div class="upgrade-header">
|
||||
<h3>{{ upgrade.description }}</h3>
|
||||
<span class="mc-badge mc-badge-success">APPROVED</span>
|
||||
</div>
|
||||
<div class="upgrade-actions">
|
||||
<button class="mc-btn mc-btn-primary" onclick="applyUpgrade('{{ upgrade.id }}')">
|
||||
Apply Now
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<!-- History -->
|
||||
<div class="mc-section">
|
||||
<h2 class="mc-section-title">History</h2>
|
||||
|
||||
{% if applied %}
|
||||
<h4>Applied</h4>
|
||||
<div class="upgrades-list upgrades-history">
|
||||
{% for upgrade in applied %}
|
||||
<div class="upgrade-card upgrade-applied">
|
||||
<span class="upgrade-desc">{{ upgrade.description }}</span>
|
||||
<span class="mc-badge mc-badge-success">APPLIED</span>
|
||||
<span class="upgrade-time">{{ upgrade.applied_at[11:16] if upgrade.applied_at else '' }}</span>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if rejected %}
|
||||
<h4>Rejected</h4>
|
||||
<div class="upgrades-list upgrades-history">
|
||||
{% for upgrade in rejected %}
|
||||
<div class="upgrade-card upgrade-rejected">
|
||||
<span class="upgrade-desc">{{ upgrade.description }}</span>
|
||||
<span class="mc-badge mc-badge-secondary">REJECTED</span>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if failed %}
|
||||
<h4>Failed</h4>
|
||||
<div class="upgrades-list upgrades-history">
|
||||
{% for upgrade in failed %}
|
||||
<div class="upgrade-card upgrade-failed">
|
||||
<span class="upgrade-desc">{{ upgrade.description }}</span>
|
||||
<span class="mc-badge mc-badge-danger">FAILED</span>
|
||||
<span class="upgrade-error" title="{{ upgrade.error_message }}">⚠️</span>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
/** Confirm with the user, then POST an approval for the given upgrade id. */
async function approveUpgrade(id) {
    const confirmed = confirm('Approve this upgrade?');
    if (!confirmed) {
        return;
    }

    const res = await fetch('/self-modify/queue/' + id + '/approve', { method: 'POST' });

    if (!res.ok) {
        const body = await res.text();
        alert('Failed to approve: ' + body);
        return;
    }
    window.location.reload();
}
|
||||
|
||||
/** Confirm with the user, then POST a rejection; the server deletes the branch. */
async function rejectUpgrade(id) {
    const confirmed = confirm('Reject this upgrade? The branch will be deleted.');
    if (!confirmed) {
        return;
    }

    const res = await fetch('/self-modify/queue/' + id + '/reject', { method: 'POST' });

    if (!res.ok) {
        const body = await res.text();
        alert('Failed to reject: ' + body);
        return;
    }
    window.location.reload();
}
|
||||
|
||||
/** Confirm with the user, then POST an apply request (merges the branch to main). */
async function applyUpgrade(id) {
    const confirmed = confirm('Apply this upgrade? This will merge to main.');
    if (!confirmed) {
        return;
    }

    const res = await fetch('/self-modify/queue/' + id + '/apply', { method: 'POST' });

    if (!res.ok) {
        const error = await res.text();
        alert('Failed to apply: ' + error);
        return;
    }
    alert('Upgrade applied successfully!');
    window.location.reload();
}
|
||||
</script>
|
||||
|
||||
<style>
|
||||
.mc-section {
|
||||
margin-bottom: 2rem;
|
||||
}
|
||||
|
||||
.mc-section-title {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
.upgrades-list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 1rem;
|
||||
}
|
||||
|
||||
.upgrade-card {
|
||||
background: rgba(10, 15, 30, 0.6);
|
||||
border: 1px solid var(--mc-border);
|
||||
border-radius: 0.5rem;
|
||||
padding: 1rem;
|
||||
}
|
||||
|
||||
.upgrade-pending {
|
||||
border-left: 4px solid #ffc107;
|
||||
}
|
||||
|
||||
.upgrade-approved {
|
||||
border-left: 4px solid #17a2b8;
|
||||
}
|
||||
|
||||
.upgrade-applied {
|
||||
border-left: 4px solid #28a745;
|
||||
}
|
||||
|
||||
.upgrade-rejected {
|
||||
border-left: 4px solid #6c757d;
|
||||
}
|
||||
|
||||
.upgrade-failed {
|
||||
border-left: 4px solid #dc3545;
|
||||
}
|
||||
|
||||
.upgrade-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: flex-start;
|
||||
margin-bottom: 0.5rem;
|
||||
}
|
||||
|
||||
.upgrade-header h3 {
|
||||
margin: 0;
|
||||
font-size: 1.1rem;
|
||||
}
|
||||
|
||||
.upgrade-meta {
|
||||
display: flex;
|
||||
gap: 1rem;
|
||||
font-size: 0.85rem;
|
||||
color: var(--mc-text-secondary);
|
||||
margin-bottom: 0.5rem;
|
||||
}
|
||||
|
||||
.upgrade-files {
|
||||
font-size: 0.9rem;
|
||||
margin-bottom: 0.5rem;
|
||||
font-family: monospace;
|
||||
}
|
||||
|
||||
.upgrade-test-status {
|
||||
margin-bottom: 0.75rem;
|
||||
}
|
||||
|
||||
.test-passed {
|
||||
color: #28a745;
|
||||
}
|
||||
|
||||
.test-failed {
|
||||
color: #dc3545;
|
||||
}
|
||||
|
||||
.upgrade-actions {
|
||||
display: flex;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
.upgrades-history .upgrade-card {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 1rem;
|
||||
padding: 0.75rem 1rem;
|
||||
}
|
||||
|
||||
.upgrade-desc {
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.upgrade-time {
|
||||
font-size: 0.85rem;
|
||||
color: var(--mc-text-secondary);
|
||||
}
|
||||
|
||||
.upgrade-error {
|
||||
color: #dc3545;
|
||||
cursor: help;
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
186
src/events/broadcaster.py
Normal file
186
src/events/broadcaster.py
Normal file
@@ -0,0 +1,186 @@
|
||||
"""Event Broadcaster - bridges event_log to WebSocket clients.
|
||||
|
||||
When events are logged, they are broadcast to all connected dashboard clients
|
||||
via WebSocket for real-time activity feed updates.
|
||||
"""
|
||||
|
||||
import asyncio
import json
import logging
from typing import Any, Optional

from swarm.event_log import EventLogEntry
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class EventBroadcaster:
    """Broadcasts events to WebSocket clients.

    Usage:
        from events.broadcaster import event_broadcaster
        event_broadcaster.broadcast(event)
    """

    def __init__(self) -> None:
        # Cached WebSocket manager; resolved lazily on first use so importing
        # this module never drags in the dashboard stack.
        self._ws_manager: Optional[Any] = None
        # Strong references to in-flight broadcast tasks: asyncio only keeps
        # a weak reference to tasks, so without this a scheduled broadcast
        # could be garbage-collected before it runs.
        self._pending: set = set()

    def _get_ws_manager(self) -> Optional[Any]:
        """Lazy import to avoid circular deps.

        Returns None (logging at debug level) when the dashboard WebSocket
        manager cannot be imported, e.g. in tests or headless runs.
        """
        if self._ws_manager is None:
            try:
                from ws_manager.handler import ws_manager
                self._ws_manager = ws_manager
            except Exception as exc:
                logger.debug("WebSocket manager not available: %s", exc)
        return self._ws_manager

    async def broadcast(self, event: EventLogEntry) -> int:
        """Broadcast an event to all connected WebSocket clients.

        Args:
            event: The event to broadcast

        Returns:
            Number of clients notified; 0 when no manager is available or
            the broadcast raised.
        """
        ws_manager = self._get_ws_manager()
        if not ws_manager:
            return 0

        # Build message payload (mirrors the fields the dashboard JS reads).
        payload = {
            "type": "event",
            "payload": {
                "id": event.id,
                "event_type": event.event_type.value,
                "source": event.source,
                "task_id": event.task_id,
                "agent_id": event.agent_id,
                "timestamp": event.timestamp,
                "data": event.data,
            }
        }

        try:
            # Broadcast to all connected clients
            count = await ws_manager.broadcast_json(payload)
            logger.debug("Broadcasted event %s to %d clients", event.id[:8], count)
            return count
        except Exception as exc:
            # Best-effort by design: a failed broadcast must never break
            # the caller that logged the event.
            logger.error("Failed to broadcast event: %s", exc)
            return 0

    def broadcast_sync(self, event: EventLogEntry) -> None:
        """Synchronous wrapper for broadcast.

        Use this from synchronous code - it schedules the async broadcast
        in the event loop if one is running; with no running loop the
        broadcast is silently skipped.
        """
        try:
            # Raises RuntimeError when called outside a running event loop.
            asyncio.get_running_loop()
            # Schedule in background, don't wait; retain a reference until
            # the task completes so it cannot be garbage-collected early.
            task = asyncio.create_task(self.broadcast(event))
            self._pending.add(task)
            task.add_done_callback(self._pending.discard)
        except RuntimeError:
            # No event loop running, skip broadcast
            pass
|
||||
|
||||
|
||||
# Global singleton
# Module-level broadcaster shared by all producers; import this instance
# instead of constructing a new EventBroadcaster per call site.
event_broadcaster = EventBroadcaster()
|
||||
|
||||
|
||||
# Event type to icon/emoji mapping used by the dashboard activity feed.
EVENT_ICONS = {
    "task.created": "📝",
    "task.bidding": "⏳",
    "task.assigned": "👤",
    "task.started": "▶️",
    "task.completed": "✅",
    "task.failed": "❌",
    "agent.joined": "🟢",
    "agent.left": "🔴",
    "agent.status_changed": "🔄",
    "bid.submitted": "💰",
    "auction.closed": "🏁",
    "tool.called": "🔧",
    "tool.completed": "⚙️",
    "tool.failed": "💥",
    "system.error": "⚠️",
    "system.warning": "🔶",
    "system.info": "ℹ️",
}

# Event type to short human-readable label mapping.
EVENT_LABELS = {
    "task.created": "New task",
    "task.bidding": "Bidding open",
    "task.assigned": "Task assigned",
    "task.started": "Task started",
    "task.completed": "Task completed",
    "task.failed": "Task failed",
    "agent.joined": "Agent joined",
    "agent.left": "Agent left",
    "agent.status_changed": "Status changed",
    "bid.submitted": "Bid submitted",
    "auction.closed": "Auction closed",
    "tool.called": "Tool called",
    "tool.completed": "Tool completed",
    "tool.failed": "Tool failed",
    "system.error": "Error",
    "system.warning": "Warning",
    "system.info": "Info",
}


def get_event_icon(event_type: str) -> str:
    """Return the emoji icon for *event_type*, or a bullet if unmapped."""
    try:
        return EVENT_ICONS[event_type]
    except KeyError:
        return "•"


def get_event_label(event_type: str) -> str:
    """Return a human-readable label for *event_type*.

    Falls back to the raw event type string for unmapped types.
    """
    if event_type in EVENT_LABELS:
        return EVENT_LABELS[event_type]
    return event_type
|
||||
|
||||
|
||||
def format_event_for_display(event: EventLogEntry) -> dict:
    """Format event for display in activity feed.

    Returns dict with display-friendly fields (icon, label, truncated
    description, short HH:MM:SS time).
    """
    data = event.data or {}
    kind = event.event_type.value

    def _clip(text: str) -> str:
        # Keep feed rows to a single line.
        return text[:60] + "..." if len(text) > 60 else text

    # Build a per-type description; fall back to well-known string fields.
    if kind == "task.created":
        description = _clip(data.get("description", ""))
    elif kind == "task.assigned":
        agent = event.agent_id[:8] if event.agent_id else "unknown"
        bid = data.get("bid_sats", "?")
        description = f"to {agent} ({bid} sats)"
    elif kind == "bid.submitted":
        bid = data.get("bid_sats", "?")
        description = f"{bid} sats"
    elif kind == "agent.joined":
        persona = data.get("persona_id", "")
        description = f"Persona: {persona}" if persona else "New agent"
    else:
        description = ""
        for key in ("message", "reason", "description"):
            if key in data:
                description = _clip(str(data[key]))
                break

    return {
        "id": event.id,
        "icon": get_event_icon(kind),
        "label": get_event_label(kind),
        "type": kind,
        "source": event.source,
        "description": description,
        "timestamp": event.timestamp,
        # ISO timestamp chars 11:19 are "HH:MM:SS".
        "time_short": event.timestamp[11:19] if event.timestamp else "",
        "task_id": event.task_id,
        "agent_id": event.agent_id,
    }
|
||||
488
src/lightning/ledger.py
Normal file
488
src/lightning/ledger.py
Normal file
@@ -0,0 +1,488 @@
|
||||
"""Lightning Network transaction ledger.
|
||||
|
||||
Tracks all Lightning payments in SQLite for audit, accounting, and dashboard display.
|
||||
"""
|
||||
|
||||
import sqlite3
|
||||
import uuid
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime, timezone
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
# Ledger rows share the swarm SQLite file; the path is relative to the
# process working directory (created on demand by _get_conn).
DB_PATH = Path("data/swarm.db")
|
||||
|
||||
|
||||
class TransactionType(str, Enum):
    """Types of Lightning transactions.

    Subclasses ``str`` so values compare and serialize as plain strings
    (e.g. when stored in the SQLite ``tx_type`` column).
    """
    INCOMING = "incoming"  # Invoice created (we're receiving)
    OUTGOING = "outgoing"  # Payment sent (we're paying)
|
||||
|
||||
|
||||
class TransactionStatus(str, Enum):
    """Status of a transaction.

    New entries start as PENDING; mark_settled moves them to SETTLED.
    """
    PENDING = "pending"  # Recorded but not yet settled
    SETTLED = "settled"  # Payment confirmed
    FAILED = "failed"    # Payment did not complete
    EXPIRED = "expired"  # Presumably invoice expired unpaid — confirm against handler
|
||||
|
||||
|
||||
@dataclass
class LedgerEntry:
    """A Lightning transaction record.

    One row of the ``ledger`` SQLite table; field names mirror the
    table's columns (see _get_conn for the schema).
    """
    id: str = field(default_factory=lambda: str(uuid.uuid4()))  # Primary key (UUID4)
    tx_type: TransactionType = TransactionType.INCOMING
    status: TransactionStatus = TransactionStatus.PENDING
    payment_hash: str = ""  # Lightning payment hash (UNIQUE in the table)
    amount_sats: int = 0  # Amount in satoshis
    memo: str = ""  # Description/purpose
    invoice: Optional[str] = None  # BOLT11 invoice string
    preimage: Optional[str] = None  # Payment preimage (proof of payment)
    source: str = ""  # Component that created the transaction
    task_id: Optional[str] = None  # Associated task, if any
    agent_id: Optional[str] = None  # Associated agent, if any
    created_at: str = field(
        # UTC ISO-8601 creation timestamp, captured per instance.
        default_factory=lambda: datetime.now(timezone.utc).isoformat()
    )
    settled_at: Optional[str] = None  # UTC ISO-8601 settle time; None while pending
    fee_sats: int = 0  # Routing fee paid
|
||||
|
||||
|
||||
def _get_conn() -> sqlite3.Connection:
    """Open (and lazily initialize) the ledger database.

    Ensures the ``ledger`` table and its secondary indexes exist, then
    returns a connection whose rows are ``sqlite3.Row``. The caller is
    responsible for closing the connection.
    """
    DB_PATH.parent.mkdir(parents=True, exist_ok=True)
    conn = sqlite3.connect(str(DB_PATH))
    conn.row_factory = sqlite3.Row
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS ledger (
            id TEXT PRIMARY KEY,
            tx_type TEXT NOT NULL,
            status TEXT NOT NULL DEFAULT 'pending',
            payment_hash TEXT UNIQUE NOT NULL,
            amount_sats INTEGER NOT NULL,
            memo TEXT,
            invoice TEXT,
            preimage TEXT,
            source TEXT NOT NULL,
            task_id TEXT,
            agent_id TEXT,
            created_at TEXT NOT NULL,
            settled_at TEXT,
            fee_sats INTEGER DEFAULT 0
        )
        """
    )
    # Secondary indexes for the columns the dashboard and handlers filter on.
    index_ddl = (
        "CREATE INDEX IF NOT EXISTS idx_ledger_status ON ledger(status)",
        "CREATE INDEX IF NOT EXISTS idx_ledger_hash ON ledger(payment_hash)",
        "CREATE INDEX IF NOT EXISTS idx_ledger_task ON ledger(task_id)",
        "CREATE INDEX IF NOT EXISTS idx_ledger_agent ON ledger(agent_id)",
        "CREATE INDEX IF NOT EXISTS idx_ledger_created ON ledger(created_at)",
    )
    for ddl in index_ddl:
        conn.execute(ddl)
    conn.commit()
    return conn
|
||||
|
||||
|
||||
def create_invoice_entry(
    payment_hash: str,
    amount_sats: int,
    memo: str = "",
    invoice: Optional[str] = None,
    source: str = "system",
    task_id: Optional[str] = None,
    agent_id: Optional[str] = None,
) -> LedgerEntry:
    """Record a new incoming invoice (we're receiving payment).

    Args:
        payment_hash: Lightning payment hash
        amount_sats: Invoice amount in satoshis
        memo: Payment description
        invoice: Full BOLT11 invoice string
        source: Component that created the invoice
        task_id: Associated task ID
        agent_id: Associated agent ID

    Returns:
        The created LedgerEntry

    Raises:
        sqlite3.IntegrityError: If payment_hash already exists (UNIQUE column).
    """
    entry = LedgerEntry(
        tx_type=TransactionType.INCOMING,
        status=TransactionStatus.PENDING,
        payment_hash=payment_hash,
        amount_sats=amount_sats,
        memo=memo,
        invoice=invoice,
        source=source,
        task_id=task_id,
        agent_id=agent_id,
    )

    conn = _get_conn()
    try:
        conn.execute(
            """
            INSERT INTO ledger (id, tx_type, status, payment_hash, amount_sats,
                                memo, invoice, source, task_id, agent_id, created_at)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                entry.id,
                entry.tx_type.value,
                entry.status.value,
                entry.payment_hash,
                entry.amount_sats,
                entry.memo,
                entry.invoice,
                entry.source,
                entry.task_id,
                entry.agent_id,
                entry.created_at,
            ),
        )
        conn.commit()
    finally:
        # Always release the connection — previously it leaked whenever the
        # INSERT raised (e.g. a duplicate payment_hash hitting UNIQUE).
        conn.close()
    return entry
|
||||
|
||||
|
||||
def record_outgoing_payment(
    payment_hash: str,
    amount_sats: int,
    memo: str = "",
    invoice: Optional[str] = None,
    source: str = "system",
    task_id: Optional[str] = None,
    agent_id: Optional[str] = None,
) -> LedgerEntry:
    """Record an outgoing payment (we're paying someone).

    Args:
        payment_hash: Lightning payment hash
        amount_sats: Payment amount in satoshis
        memo: Payment description
        invoice: BOLT11 invoice we paid
        source: Component that initiated payment
        task_id: Associated task ID
        agent_id: Associated agent ID

    Returns:
        The created LedgerEntry (status stays PENDING until mark_settled)
    """
    entry = LedgerEntry(
        tx_type=TransactionType.OUTGOING,
        status=TransactionStatus.PENDING,
        payment_hash=payment_hash,
        amount_sats=amount_sats,
        memo=memo,
        invoice=invoice,
        source=source,
        task_id=task_id,
        agent_id=agent_id,
    )

    conn = _get_conn()
    try:
        conn.execute(
            """
            INSERT INTO ledger (id, tx_type, status, payment_hash, amount_sats,
                                memo, invoice, source, task_id, agent_id, created_at)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                entry.id,
                entry.tx_type.value,
                entry.status.value,
                entry.payment_hash,
                entry.amount_sats,
                entry.memo,
                entry.invoice,
                entry.source,
                entry.task_id,
                entry.agent_id,
                entry.created_at,
            ),
        )
        conn.commit()
    finally:
        # Always release the connection, even if the INSERT raises
        # (previously a failed INSERT leaked the connection).
        conn.close()
    return entry
|
||||
|
||||
|
||||
def mark_settled(
    payment_hash: str,
    preimage: Optional[str] = None,
    fee_sats: int = 0,
) -> Optional[LedgerEntry]:
    """Mark a transaction as settled (payment received or sent successfully).

    Args:
        payment_hash: Lightning payment hash identifying the row(s) to update
        preimage: Payment preimage (proof of payment)
        fee_sats: Routing fee paid (meaningful for outgoing payments)

    Returns:
        Updated LedgerEntry, or None if no row matched the hash
    """
    settled_at = datetime.now(timezone.utc).isoformat()

    conn = _get_conn()
    try:
        cursor = conn.execute(
            """
            UPDATE ledger
            SET status = ?, preimage = ?, settled_at = ?, fee_sats = ?
            WHERE payment_hash = ?
            """,
            (TransactionStatus.SETTLED.value, preimage, settled_at, fee_sats, payment_hash),
        )
        conn.commit()
        matched = cursor.rowcount
    finally:
        # Close unconditionally: the original leaked the connection if the
        # UPDATE raised, and also kept it open across the get_by_hash()
        # call below, which opens its own connection anyway.
        conn.close()

    if matched == 0:
        return None

    # Fetch and return the updated entry (fresh connection inside).
    return get_by_hash(payment_hash)
|
||||
|
||||
|
||||
def mark_failed(payment_hash: str, reason: str = "") -> Optional[LedgerEntry]:
    """Mark a transaction as failed.

    Args:
        payment_hash: Lightning payment hash
        reason: Failure reason (appended to the memo field)

    Returns:
        Updated LedgerEntry or None if not found
    """
    conn = _get_conn()
    try:
        cursor = conn.execute(
            """
            UPDATE ledger
            SET status = ?, memo = COALESCE(memo, '') || ' [FAILED: ' || ? || ']'
            WHERE payment_hash = ?
            """,
            # COALESCE guards against a NULL memo: in SQLite, NULL || x is
            # NULL, which would silently erase the failure annotation.
            (TransactionStatus.FAILED.value, reason, payment_hash),
        )
        conn.commit()
        matched = cursor.rowcount
    finally:
        # Always release the connection (previously leaked on exception).
        conn.close()

    if matched == 0:
        return None

    return get_by_hash(payment_hash)
|
||||
|
||||
|
||||
def get_by_hash(payment_hash: str) -> Optional[LedgerEntry]:
    """Look up a single ledger entry by its Lightning payment hash.

    Returns None when no row matches.
    """
    conn = _get_conn()
    row = conn.execute(
        "SELECT * FROM ledger WHERE payment_hash = ?", (payment_hash,)
    ).fetchone()
    conn.close()

    if row is None:
        return None

    # Columns that map straight onto dataclass fields; the two enum
    # columns are converted explicitly below.
    passthrough = (
        "id", "payment_hash", "amount_sats", "memo", "invoice", "preimage",
        "source", "task_id", "agent_id", "created_at", "settled_at", "fee_sats",
    )
    kwargs = {name: row[name] for name in passthrough}
    return LedgerEntry(
        tx_type=TransactionType(row["tx_type"]),
        status=TransactionStatus(row["status"]),
        **kwargs,
    )
|
||||
|
||||
|
||||
def list_transactions(
    tx_type: Optional[TransactionType] = None,
    status: Optional[TransactionStatus] = None,
    task_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    limit: int = 100,
    offset: int = 0,
) -> list[LedgerEntry]:
    """List ledger transactions with optional filtering.

    Returns:
        LedgerEntry objects ordered newest-first, paginated via
        limit/offset.
    """
    # Each pair is (SQL clause, bound value); a falsy value means the
    # filter was not supplied and the clause is dropped.
    candidate_filters = (
        ("tx_type = ?", tx_type and tx_type.value),
        ("status = ?", status and status.value),
        ("task_id = ?", task_id),
        ("agent_id = ?", agent_id),
    )
    conditions = [clause for clause, value in candidate_filters if value]
    params: list = [value for clause, value in candidate_filters if value]

    where_clause = "WHERE " + " AND ".join(conditions) if conditions else ""

    query = f"""
    SELECT * FROM ledger
    {where_clause}
    ORDER BY created_at DESC
    LIMIT ? OFFSET ?
    """
    params += [limit, offset]

    conn = _get_conn()
    rows = conn.execute(query, params).fetchall()
    conn.close()

    def row_to_entry(r) -> LedgerEntry:
        # Convert one DB row into the dataclass, mapping enum columns.
        return LedgerEntry(
            id=r["id"],
            tx_type=TransactionType(r["tx_type"]),
            status=TransactionStatus(r["status"]),
            payment_hash=r["payment_hash"],
            amount_sats=r["amount_sats"],
            memo=r["memo"],
            invoice=r["invoice"],
            preimage=r["preimage"],
            source=r["source"],
            task_id=r["task_id"],
            agent_id=r["agent_id"],
            created_at=r["created_at"],
            settled_at=r["settled_at"],
            fee_sats=r["fee_sats"],
        )

    return [row_to_entry(r) for r in rows]
|
||||
|
||||
|
||||
def get_balance() -> dict:
    """Get current balance summary.

    Returns:
        Dict with incoming, outgoing, fee, net, pending, and available
        balances (all in satoshis).
    """
    conn = _get_conn()

    def total_for(tx_type: TransactionType, status: TransactionStatus) -> int:
        # Sum of amount_sats for one (type, status) bucket; 0 when empty.
        return conn.execute(
            """
            SELECT COALESCE(SUM(amount_sats), 0) as total
            FROM ledger
            WHERE tx_type = ? AND status = ?
            """,
            (tx_type.value, status.value),
        ).fetchone()["total"]

    # Settled incoming (invoices we created that were paid)
    incoming = total_for(TransactionType.INCOMING, TransactionStatus.SETTLED)

    # Settled outgoing needs both the amounts and the routing fees.
    outgoing_result = conn.execute(
        """
        SELECT COALESCE(SUM(amount_sats), 0) as total,
               COALESCE(SUM(fee_sats), 0) as fees
        FROM ledger
        WHERE tx_type = ? AND status = ?
        """,
        (TransactionType.OUTGOING.value, TransactionStatus.SETTLED.value),
    ).fetchone()
    outgoing = outgoing_result["total"]
    fees = outgoing_result["fees"]

    # Unsettled amounts in both directions.
    pending_incoming = total_for(TransactionType.INCOMING, TransactionStatus.PENDING)
    pending_outgoing = total_for(TransactionType.OUTGOING, TransactionStatus.PENDING)

    conn.close()

    return {
        "incoming_total_sats": incoming,
        "outgoing_total_sats": outgoing,
        "fees_paid_sats": fees,
        "net_sats": incoming - outgoing - fees,
        "pending_incoming_sats": pending_incoming,
        "pending_outgoing_sats": pending_outgoing,
        "available_sats": incoming - outgoing - fees - pending_outgoing,
    }
|
||||
|
||||
|
||||
def get_transaction_stats(days: int = 30) -> dict:
    """Get transaction statistics for the last N days.

    Returns:
        Dict keyed by ISO date, each value holding per-direction
        {"count", "volume"} aggregates.
    """
    from datetime import timedelta

    cutoff = (datetime.now(timezone.utc) - timedelta(days=days)).isoformat()

    conn = _get_conn()
    rows = conn.execute(
        """
        SELECT
            date(created_at) as date,
            tx_type,
            status,
            COUNT(*) as count,
            SUM(amount_sats) as volume
        FROM ledger
        WHERE created_at > ?
        GROUP BY date(created_at), tx_type, status
        ORDER BY date DESC
        """,
        (cutoff,),
    ).fetchall()
    conn.close()

    stats: dict = {}
    for r in rows:
        # Lazily create the per-day bucket, then fold this (type, status)
        # group into the incoming or outgoing side.
        day = stats.setdefault(
            r["date"],
            {"incoming": {"count": 0, "volume": 0},
             "outgoing": {"count": 0, "volume": 0}},
        )
        direction = (
            "incoming"
            if r["tx_type"] == TransactionType.INCOMING.value
            else "outgoing"
        )
        day[direction]["count"] += r["count"]
        day[direction]["volume"] += r["volume"]

    return stats
|
||||
483
src/memory/vector_store.py
Normal file
483
src/memory/vector_store.py
Normal file
@@ -0,0 +1,483 @@
|
||||
"""Vector store for semantic memory using sqlite-vss.
|
||||
|
||||
Provides embedding-based similarity search for the Echo agent
|
||||
to retrieve relevant context from conversation history.
|
||||
"""
|
||||
|
||||
import json
|
||||
import sqlite3
|
||||
import uuid
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
DB_PATH = Path("data/swarm.db")
|
||||
|
||||
# Simple embedding function using sentence-transformers if available,
|
||||
# otherwise fall back to keyword-based "pseudo-embeddings"
|
||||
try:
|
||||
from sentence_transformers import SentenceTransformer
|
||||
_model = SentenceTransformer('all-MiniLM-L6-v2')
|
||||
_has_embeddings = True
|
||||
except ImportError:
|
||||
_has_embeddings = False
|
||||
_model = None
|
||||
|
||||
|
||||
def _get_embedding_dimension() -> int:
    """Return the dimensionality of the embedding vectors in use.

    Falls back to 384 — the all-MiniLM-L6-v2 size — when no real
    embedding model is loaded.
    """
    if not (_has_embeddings and _model):
        return 384  # Default for all-MiniLM-L6-v2
    return _model.get_sentence_embedding_dimension()
|
||||
|
||||
|
||||
def _compute_embedding(text: str) -> list[float]:
    """Compute embedding vector for text.

    Uses sentence-transformers if available. Otherwise falls back to a
    normalized bag of character trigrams hashed into a fixed 384-dim
    vector — not as good, but works without heavy dependencies.

    The fallback hashes with zlib.crc32 rather than the builtin hash():
    string hashing is randomized per process (PYTHONHASHSEED), so
    hash()-based vectors persisted to the DB would not be comparable
    across restarts.
    """
    if _has_embeddings and _model:
        return _model.encode(text).tolist()

    import zlib  # local import: only needed on the fallback path

    dim = 384
    vec = [0.0] * dim
    text = text.lower()

    # Bucket each character trigram via a *stable* hash.
    for i in range(len(text) - 2):
        trigram = text[i:i + 3]
        vec[zlib.crc32(trigram.encode("utf-8")) % dim] += 1.0

    # L2-normalize so dot products behave like cosine similarity.
    norm = sum(x * x for x in vec) ** 0.5
    if norm > 0:
        vec = [x / norm for x in vec]

    return vec
|
||||
|
||||
|
||||
@dataclass
class MemoryEntry:
    """A memory entry with vector embedding.

    Stored in the memory_entries table; embedding/metadata are persisted
    as JSON text columns. relevance_score is transient — populated only
    by search_memories, never written to the DB.
    """
    id: str = field(default_factory=lambda: str(uuid.uuid4()))  # UUID4 primary key
    content: str = ""  # The actual text content
    source: str = ""  # Where it came from (agent, user, system)
    context_type: str = "conversation"  # conversation, document, fact, etc.
    agent_id: Optional[str] = None  # Associated agent, if any
    task_id: Optional[str] = None  # Associated task, if any
    session_id: Optional[str] = None  # Session grouping key
    metadata: Optional[dict] = None  # Arbitrary structured extras (JSON in DB)
    embedding: Optional[list[float]] = None  # Vector embedding (JSON in DB)
    timestamp: str = field(
        default_factory=lambda: datetime.now(timezone.utc).isoformat()
    )  # ISO-8601 UTC creation time
    relevance_score: Optional[float] = None  # Set during search
|
||||
|
||||
|
||||
def _get_conn() -> sqlite3.Connection:
    """Open the memory DB, ensure the schema exists, and return the connection.

    Attempts to load the sqlite-vss extension (vector0 + vss0); when it
    is unavailable the store still works and similarity is scored in
    Python instead. The caller is responsible for closing the returned
    connection.
    """
    DB_PATH.parent.mkdir(parents=True, exist_ok=True)
    conn = sqlite3.connect(str(DB_PATH))
    conn.row_factory = sqlite3.Row

    # Best-effort load of sqlite-vss. Extension loading is re-disabled
    # afterwards so arbitrary extensions cannot be loaded later via SQL.
    # (The original also set a local _has_vss flag that was never read;
    # dropped as dead code.)
    try:
        conn.enable_load_extension(True)
        try:
            conn.load_extension("vector0")
            conn.load_extension("vss0")
        finally:
            conn.enable_load_extension(False)
    except Exception:
        pass  # vss unavailable; search falls back to in-memory scoring

    # Create tables
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS memory_entries (
            id TEXT PRIMARY KEY,
            content TEXT NOT NULL,
            source TEXT NOT NULL,
            context_type TEXT NOT NULL DEFAULT 'conversation',
            agent_id TEXT,
            task_id TEXT,
            session_id TEXT,
            metadata TEXT,
            embedding TEXT,  -- JSON array of floats
            timestamp TEXT NOT NULL
        )
        """
    )

    # Indexes for the columns commonly used as filters.
    for index_name, column in (
        ("idx_memory_agent", "agent_id"),
        ("idx_memory_task", "task_id"),
        ("idx_memory_session", "session_id"),
        ("idx_memory_time", "timestamp"),
        ("idx_memory_type", "context_type"),
    ):
        conn.execute(
            f"CREATE INDEX IF NOT EXISTS {index_name} ON memory_entries({column})"
        )

    conn.commit()
    return conn
|
||||
|
||||
|
||||
def store_memory(
    content: str,
    source: str,
    context_type: str = "conversation",
    agent_id: Optional[str] = None,
    task_id: Optional[str] = None,
    session_id: Optional[str] = None,
    metadata: Optional[dict] = None,
    compute_embedding: bool = True,
) -> MemoryEntry:
    """Persist a memory entry, optionally with a vector embedding.

    Args:
        content: The text content to store
        source: Source of the memory (agent name, user, system)
        context_type: Type of context (conversation, document, fact)
        agent_id: Associated agent ID
        task_id: Associated task ID
        session_id: Session identifier
        metadata: Additional structured data
        compute_embedding: Whether to compute a vector embedding

    Returns:
        The stored MemoryEntry
    """
    embedding = _compute_embedding(content) if compute_embedding else None

    entry = MemoryEntry(
        content=content,
        source=source,
        context_type=context_type,
        agent_id=agent_id,
        task_id=task_id,
        session_id=session_id,
        metadata=metadata,
        embedding=embedding,
    )

    # metadata/embedding are serialized to JSON text; empty/None become NULL.
    values = (
        entry.id,
        entry.content,
        entry.source,
        entry.context_type,
        entry.agent_id,
        entry.task_id,
        entry.session_id,
        json.dumps(metadata) if metadata else None,
        json.dumps(embedding) if embedding else None,
        entry.timestamp,
    )

    conn = _get_conn()
    conn.execute(
        """
        INSERT INTO memory_entries
        (id, content, source, context_type, agent_id, task_id, session_id,
         metadata, embedding, timestamp)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """,
        values,
    )
    conn.commit()
    conn.close()

    return entry
|
||||
|
||||
|
||||
def search_memories(
    query: str,
    limit: int = 10,
    context_type: Optional[str] = None,
    agent_id: Optional[str] = None,
    session_id: Optional[str] = None,
    min_relevance: float = 0.0,
) -> list[MemoryEntry]:
    """Search stored memories by semantic similarity to a query.

    Args:
        query: Search query text
        limit: Maximum results
        context_type: Filter by context type
        agent_id: Filter by agent
        session_id: Filter by session
        min_relevance: Minimum similarity score (0-1)

    Returns:
        MemoryEntry objects sorted by relevance, best first
    """
    query_embedding = _compute_embedding(query)

    conn = _get_conn()

    # Build the WHERE clause from whichever filters were supplied.
    filter_pairs = (
        ("context_type = ?", context_type),
        ("agent_id = ?", agent_id),
        ("session_id = ?", session_id),
    )
    conditions = [clause for clause, value in filter_pairs if value]
    params: list = [value for clause, value in filter_pairs if value]

    where_clause = "WHERE " + " AND ".join(conditions) if conditions else ""

    # Pull a few times more recent candidates than requested and rank
    # them in Python. With sqlite-vss this would use the vector index.
    query_sql = f"""
    SELECT * FROM memory_entries
    {where_clause}
    ORDER BY timestamp DESC
    LIMIT ?
    """
    params.append(limit * 3)

    rows = conn.execute(query_sql, params).fetchall()
    conn.close()

    ranked: list[MemoryEntry] = []
    for row in rows:
        candidate = MemoryEntry(
            id=row["id"],
            content=row["content"],
            source=row["source"],
            context_type=row["context_type"],
            agent_id=row["agent_id"],
            task_id=row["task_id"],
            session_id=row["session_id"],
            metadata=json.loads(row["metadata"]) if row["metadata"] else None,
            embedding=json.loads(row["embedding"]) if row["embedding"] else None,
            timestamp=row["timestamp"],
        )

        # Cosine similarity against the stored vector; keyword overlap is
        # the fallback for entries saved without an embedding.
        if candidate.embedding:
            score = _cosine_similarity(query_embedding, candidate.embedding)
        else:
            score = _keyword_overlap(query, candidate.content)

        candidate.relevance_score = score
        if score >= min_relevance:
            ranked.append(candidate)

    # Best first, capped at the requested count.
    ranked.sort(key=lambda e: e.relevance_score or 0, reverse=True)
    return ranked[:limit]
|
||||
|
||||
|
||||
def _cosine_similarity(a: list[float], b: list[float]) -> float:
|
||||
"""Compute cosine similarity between two vectors."""
|
||||
dot = sum(x*y for x, y in zip(a, b))
|
||||
norm_a = sum(x*x for x in a) ** 0.5
|
||||
norm_b = sum(x*x for x in b) ** 0.5
|
||||
if norm_a == 0 or norm_b == 0:
|
||||
return 0.0
|
||||
return dot / (norm_a * norm_b)
|
||||
|
||||
|
||||
def _keyword_overlap(query: str, content: str) -> float:
|
||||
"""Simple keyword overlap score as fallback."""
|
||||
query_words = set(query.lower().split())
|
||||
content_words = set(content.lower().split())
|
||||
if not query_words:
|
||||
return 0.0
|
||||
overlap = len(query_words & content_words)
|
||||
return overlap / len(query_words)
|
||||
|
||||
|
||||
def get_memory_context(
    query: str,
    max_tokens: int = 2000,
    **filters
) -> str:
    """Build a prompt-ready context string from the most relevant memories.

    Args:
        query: Search query
        max_tokens: Approximate maximum tokens to return
        **filters: Additional filters (agent_id, session_id, etc.)

    Returns:
        Formatted context string for inclusion in prompts; empty string
        when nothing fits.
    """
    char_budget = max_tokens * 4  # ~4 characters per token heuristic
    selected: list[str] = []
    used_chars = 0

    for mem in search_memories(query, limit=20, **filters):
        line = f"[{mem.source}]: {mem.content}"
        # Stop at the first memory that would blow the budget.
        if used_chars + len(line) > char_budget:
            break
        selected.append(line)
        used_chars += len(line)

    if not selected:
        return ""

    return "Relevant context from memory:\n" + "\n\n".join(selected)
|
||||
|
||||
|
||||
def recall_personal_facts(agent_id: Optional[str] = None) -> list[str]:
    """Recall personal facts about the user or system.

    Args:
        agent_id: When given, restrict facts to that agent.

    Returns:
        Up to 100 fact strings, newest first.
    """
    # Build the query once instead of duplicating two near-identical
    # statements (the only difference was the optional agent filter).
    where = "context_type = 'fact'"
    params: tuple = ()
    if agent_id:
        where += " AND agent_id = ?"
        params = (agent_id,)

    conn = _get_conn()
    rows = conn.execute(
        f"""
        SELECT content FROM memory_entries
        WHERE {where}
        ORDER BY timestamp DESC
        LIMIT 100
        """,
        params,
    ).fetchall()
    conn.close()
    return [r["content"] for r in rows]
|
||||
|
||||
|
||||
def store_personal_fact(fact: str, agent_id: Optional[str] = None) -> MemoryEntry:
    """Persist a personal fact about the user or system.

    Thin wrapper over store_memory that fixes context_type='fact' and
    source='system'.

    Args:
        fact: The fact text to remember
        agent_id: Agent the fact is associated with, if any

    Returns:
        The stored MemoryEntry
    """
    # Facts stored through this entry point are manual, not mined from
    # conversation, hence auto_extracted=False.
    fact_metadata = {"auto_extracted": False}
    return store_memory(
        content=fact,
        source="system",
        context_type="fact",
        agent_id=agent_id,
        metadata=fact_metadata,
    )
|
||||
|
||||
|
||||
def delete_memory(memory_id: str) -> bool:
    """Delete one memory entry by its ID.

    Returns:
        True when a row was removed, False when no entry matched.
    """
    conn = _get_conn()
    removed = conn.execute(
        "DELETE FROM memory_entries WHERE id = ?",
        (memory_id,),
    ).rowcount > 0
    conn.commit()
    conn.close()
    return removed
|
||||
|
||||
|
||||
def get_memory_stats() -> dict:
    """Summarize the memory store.

    Returns:
        Dict with the total entry count, per-context_type counts, how
        many entries carry embeddings, and whether a real embedding
        model is loaded.
    """
    conn = _get_conn()

    total = conn.execute(
        "SELECT COUNT(*) as count FROM memory_entries"
    ).fetchone()["count"]

    type_rows = conn.execute(
        "SELECT context_type, COUNT(*) as count FROM memory_entries GROUP BY context_type"
    ).fetchall()
    by_type = {row["context_type"]: row["count"] for row in type_rows}

    with_embeddings = conn.execute(
        "SELECT COUNT(*) as count FROM memory_entries WHERE embedding IS NOT NULL"
    ).fetchone()["count"]

    conn.close()

    return {
        "total_entries": total,
        "by_type": by_type,
        "with_embeddings": with_embeddings,
        "has_embedding_model": _has_embeddings,
    }
|
||||
|
||||
|
||||
def prune_memories(older_than_days: int = 90, keep_facts: bool = True) -> int:
    """Delete old memories to bound storage growth.

    Args:
        older_than_days: Entries older than this many days are removed
        keep_facts: When True, 'fact'-type entries are preserved

    Returns:
        Number of entries deleted
    """
    from datetime import timedelta

    cutoff = (datetime.now(timezone.utc) - timedelta(days=older_than_days)).isoformat()

    conn = _get_conn()

    if not keep_facts:
        cursor = conn.execute(
            "DELETE FROM memory_entries WHERE timestamp < ?",
            (cutoff,),
        )
    else:
        # Facts are exempt from pruning when requested.
        cursor = conn.execute(
            """
            DELETE FROM memory_entries
            WHERE timestamp < ? AND context_type != 'fact'
            """,
            (cutoff,),
        )

    removed = cursor.rowcount
    conn.commit()
    conn.close()

    return removed
|
||||
@@ -28,6 +28,10 @@ from swarm.tasks import (
|
||||
list_tasks,
|
||||
update_task,
|
||||
)
|
||||
from swarm.event_log import (
|
||||
EventType,
|
||||
log_event,
|
||||
)
|
||||
|
||||
# Spark Intelligence integration — lazy import to avoid circular deps
|
||||
def _get_spark():
|
||||
@@ -92,6 +96,14 @@ class SwarmCoordinator:
|
||||
|
||||
aid = agent_id or str(__import__("uuid").uuid4())
|
||||
node = PersonaNode(persona_id=persona_id, agent_id=aid, comms=self.comms)
|
||||
|
||||
# Log agent join event
|
||||
log_event(
|
||||
EventType.AGENT_JOINED,
|
||||
source="coordinator",
|
||||
agent_id=aid,
|
||||
data={"persona_id": persona_id, "name": node.name},
|
||||
)
|
||||
|
||||
def _bid_and_register(msg):
|
||||
task_id = msg.data.get("task_id")
|
||||
@@ -209,6 +221,18 @@ class SwarmCoordinator:
|
||||
self.auctions.open_auction(task.id)
|
||||
self.comms.post_task(task.id, description)
|
||||
logger.info("Task posted: %s (%s)", task.id, description[:50])
|
||||
# Log task creation event
|
||||
log_event(
|
||||
EventType.TASK_CREATED,
|
||||
source="coordinator",
|
||||
task_id=task.id,
|
||||
data={"description": description[:200]},
|
||||
)
|
||||
log_event(
|
||||
EventType.TASK_BIDDING,
|
||||
source="coordinator",
|
||||
task_id=task.id,
|
||||
)
|
||||
# Broadcast task posted via WebSocket
|
||||
self._broadcast(self._broadcast_task_posted, task.id, description)
|
||||
# Spark: capture task-posted event with candidate agents
|
||||
@@ -280,6 +304,14 @@ class SwarmCoordinator:
|
||||
"Task %s assigned to %s at %d sats",
|
||||
task_id, winner.agent_id, winner.bid_sats,
|
||||
)
|
||||
# Log task assignment event
|
||||
log_event(
|
||||
EventType.TASK_ASSIGNED,
|
||||
source="coordinator",
|
||||
task_id=task_id,
|
||||
agent_id=winner.agent_id,
|
||||
data={"bid_sats": winner.bid_sats},
|
||||
)
|
||||
# Broadcast task assigned via WebSocket
|
||||
self._broadcast(self._broadcast_task_assigned, task_id, winner.agent_id)
|
||||
# Spark: capture assignment
|
||||
@@ -289,6 +321,13 @@ class SwarmCoordinator:
|
||||
else:
|
||||
update_task(task_id, status=TaskStatus.FAILED)
|
||||
logger.warning("Task %s: no bids received, marked as failed", task_id)
|
||||
# Log task failure event
|
||||
log_event(
|
||||
EventType.TASK_FAILED,
|
||||
source="coordinator",
|
||||
task_id=task_id,
|
||||
data={"reason": "no bids received"},
|
||||
)
|
||||
return winner
|
||||
|
||||
def complete_task(self, task_id: str, result: str) -> Optional[Task]:
|
||||
@@ -308,6 +347,14 @@ class SwarmCoordinator:
|
||||
self.comms.complete_task(task_id, task.assigned_agent, result)
|
||||
# Record success in learner
|
||||
swarm_learner.record_task_result(task_id, task.assigned_agent, succeeded=True)
|
||||
# Log task completion event
|
||||
log_event(
|
||||
EventType.TASK_COMPLETED,
|
||||
source="coordinator",
|
||||
task_id=task_id,
|
||||
agent_id=task.assigned_agent,
|
||||
data={"result_preview": result[:500]},
|
||||
)
|
||||
# Broadcast task completed via WebSocket
|
||||
self._broadcast(
|
||||
self._broadcast_task_completed,
|
||||
@@ -335,6 +382,14 @@ class SwarmCoordinator:
|
||||
registry.update_status(task.assigned_agent, "idle")
|
||||
# Record failure in learner
|
||||
swarm_learner.record_task_result(task_id, task.assigned_agent, succeeded=False)
|
||||
# Log task failure event
|
||||
log_event(
|
||||
EventType.TASK_FAILED,
|
||||
source="coordinator",
|
||||
task_id=task_id,
|
||||
agent_id=task.assigned_agent,
|
||||
data={"reason": reason},
|
||||
)
|
||||
# Spark: capture failure
|
||||
spark = _get_spark()
|
||||
if spark:
|
||||
|
||||
329
src/swarm/event_log.py
Normal file
329
src/swarm/event_log.py
Normal file
@@ -0,0 +1,329 @@
|
||||
"""Event logging for swarm system.
|
||||
|
||||
All agent actions, task lifecycle events, and system events are logged
|
||||
to SQLite for audit, debugging, and analytics.
|
||||
"""
|
||||
|
||||
import sqlite3
|
||||
import uuid
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime, timezone
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
DB_PATH = Path("data/swarm.db")
|
||||
|
||||
|
||||
class EventType(str, Enum):
    """Types of events logged.

    Values are dotted 'domain.action' strings; subclassing str lets them
    be stored and compared as plain strings in the DB and over the wire.
    """
    # Task lifecycle
    TASK_CREATED = "task.created"
    TASK_BIDDING = "task.bidding"
    TASK_ASSIGNED = "task.assigned"
    TASK_STARTED = "task.started"
    TASK_COMPLETED = "task.completed"
    TASK_FAILED = "task.failed"

    # Agent lifecycle
    AGENT_JOINED = "agent.joined"
    AGENT_LEFT = "agent.left"
    AGENT_STATUS_CHANGED = "agent.status_changed"

    # Bidding
    BID_SUBMITTED = "bid.submitted"
    AUCTION_CLOSED = "auction.closed"

    # Tool execution
    TOOL_CALLED = "tool.called"
    TOOL_COMPLETED = "tool.completed"
    TOOL_FAILED = "tool.failed"

    # System
    SYSTEM_ERROR = "system.error"
    SYSTEM_WARNING = "system.warning"
    SYSTEM_INFO = "system.info"
|
||||
|
||||
|
||||
@dataclass
class EventLogEntry:
    """A logged event.

    One row of the event_log table; the data field is an already
    JSON-serialized string (see log_event), not a dict.
    """
    id: str = field(default_factory=lambda: str(uuid.uuid4()))  # UUID4 primary key
    event_type: EventType = EventType.SYSTEM_INFO  # Dotted event kind
    source: str = ""  # Agent or component that emitted the event
    task_id: Optional[str] = None  # Associated task, if any
    agent_id: Optional[str] = None  # Associated agent, if any
    data: Optional[str] = None  # JSON string of additional data
    timestamp: str = field(
        default_factory=lambda: datetime.now(timezone.utc).isoformat()
    )  # ISO-8601 UTC creation time
|
||||
|
||||
|
||||
def _get_conn() -> sqlite3.Connection:
    """Open the event-log DB, ensure its schema exists, and return it.

    The caller is responsible for closing the returned connection.
    """
    DB_PATH.parent.mkdir(parents=True, exist_ok=True)
    conn = sqlite3.connect(str(DB_PATH))
    conn.row_factory = sqlite3.Row
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS event_log (
            id TEXT PRIMARY KEY,
            event_type TEXT NOT NULL,
            source TEXT NOT NULL,
            task_id TEXT,
            agent_id TEXT,
            data TEXT,
            timestamp TEXT NOT NULL
        )
        """
    )
    # Indexes on the columns used for common filtered queries.
    for index_name, column in (
        ("idx_event_log_task", "task_id"),
        ("idx_event_log_agent", "agent_id"),
        ("idx_event_log_type", "event_type"),
        ("idx_event_log_time", "timestamp"),
    ):
        conn.execute(
            f"CREATE INDEX IF NOT EXISTS {index_name} ON event_log({column})"
        )
    conn.commit()
    return conn
|
||||
|
||||
|
||||
def log_event(
    event_type: EventType,
    source: str,
    task_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    data: Optional[dict] = None,
) -> EventLogEntry:
    """Persist an event and best-effort broadcast it to live dashboards.

    Args:
        event_type: Type of event
        source: Component or agent that emitted the event
        task_id: Optional associated task ID
        agent_id: Optional associated agent ID
        data: Optional dictionary of additional data (JSON-serialized)

    Returns:
        The created EventLogEntry
    """
    import json

    entry = EventLogEntry(
        event_type=event_type,
        source=source,
        task_id=task_id,
        agent_id=agent_id,
        data=json.dumps(data) if data else None,
    )

    values = (
        entry.id,
        entry.event_type.value,
        entry.source,
        entry.task_id,
        entry.agent_id,
        entry.data,
        entry.timestamp,
    )

    conn = _get_conn()
    conn.execute(
        """
        INSERT INTO event_log (id, event_type, source, task_id, agent_id, data, timestamp)
        VALUES (?, ?, ?, ?, ?, ?, ?)
        """,
        values,
    )
    conn.commit()
    conn.close()

    # Best-effort push to WebSocket clients for the live activity feed;
    # logging must never fail because the broadcaster is missing or down.
    try:
        from events.broadcaster import event_broadcaster
        event_broadcaster.broadcast_sync(entry)
    except Exception:
        pass

    return entry
|
||||
|
||||
|
||||
def get_event(event_id: str) -> Optional[EventLogEntry]:
    """Fetch one event by primary key; returns None when no row matches."""
    conn = _get_conn()
    row = conn.execute(
        "SELECT * FROM event_log WHERE id = ?", (event_id,)
    ).fetchone()
    conn.close()

    if row is None:
        return None

    # Copy the pass-through columns verbatim; only event_type needs
    # converting back into its enum.
    passthrough = ("id", "source", "task_id", "agent_id", "data", "timestamp")
    kwargs = {name: row[name] for name in passthrough}
    kwargs["event_type"] = EventType(row["event_type"])
    return EventLogEntry(**kwargs)
|
||||
|
||||
|
||||
def list_events(
    event_type: Optional[EventType] = None,
    task_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    source: Optional[str] = None,
    limit: int = 100,
    offset: int = 0,
) -> list[EventLogEntry]:
    """List events with optional filtering.

    Args:
        event_type: Filter by event type
        task_id: Filter by associated task
        agent_id: Filter by associated agent
        source: Filter by source component
        limit: Maximum number of events to return
        offset: Number of events to skip (for pagination)

    Returns:
        List of EventLogEntry objects, newest first
    """
    conditions = []
    params: list = []

    if event_type:
        conditions.append("event_type = ?")
        params.append(event_type.value)
    if task_id:
        conditions.append("task_id = ?")
        params.append(task_id)
    if agent_id:
        conditions.append("agent_id = ?")
        params.append(agent_id)
    if source:
        conditions.append("source = ?")
        params.append(source)

    # Only fixed column names are interpolated into the SQL; all values go
    # through placeholders, so the f-string below cannot inject SQL.
    where_clause = "WHERE " + " AND ".join(conditions) if conditions else ""

    query = f"""
        SELECT * FROM event_log
        {where_clause}
        ORDER BY timestamp DESC
        LIMIT ? OFFSET ?
    """
    params.extend([limit, offset])

    # try/finally releases the sqlite handle even when the query raises
    # (the original leaked the connection on error).
    conn = _get_conn()
    try:
        rows = conn.execute(query, params).fetchall()
    finally:
        conn.close()

    return [
        EventLogEntry(
            id=r["id"],
            event_type=EventType(r["event_type"]),
            source=r["source"],
            task_id=r["task_id"],
            agent_id=r["agent_id"],
            data=r["data"],
            timestamp=r["timestamp"],
        )
        for r in rows
    ]
|
||||
|
||||
|
||||
def get_task_events(task_id: str) -> list[EventLogEntry]:
    """Return every logged event associated with the given task (up to 1000)."""
    return list_events(limit=1000, task_id=task_id)
|
||||
|
||||
|
||||
def get_agent_events(agent_id: str) -> list[EventLogEntry]:
    """Return every logged event associated with the given agent (up to 1000)."""
    return list_events(limit=1000, agent_id=agent_id)
|
||||
|
||||
|
||||
def get_recent_events(minutes: int = 60) -> list[EventLogEntry]:
    """Return events newer than *minutes* ago, newest first."""
    from datetime import timedelta

    # ISO-8601 strings sort lexicographically, so a plain > comparison works.
    cutoff = (datetime.now(timezone.utc) - timedelta(minutes=minutes)).isoformat()

    conn = _get_conn()
    rows = conn.execute(
        """
        SELECT * FROM event_log
        WHERE timestamp > ?
        ORDER BY timestamp DESC
        """,
        (cutoff,),
    ).fetchall()
    conn.close()

    results: list[EventLogEntry] = []
    for r in rows:
        results.append(
            EventLogEntry(
                id=r["id"],
                event_type=EventType(r["event_type"]),
                source=r["source"],
                task_id=r["task_id"],
                agent_id=r["agent_id"],
                data=r["data"],
                timestamp=r["timestamp"],
            )
        )
    return results
|
||||
|
||||
|
||||
def get_event_summary(minutes: int = 60) -> dict:
    """Count events from the last *minutes* minutes, grouped by type.

    Returns:
        Dict mapping event type strings to counts, largest count first.
    """
    from datetime import timedelta

    cutoff = (datetime.now(timezone.utc) - timedelta(minutes=minutes)).isoformat()

    conn = _get_conn()
    rows = conn.execute(
        """
        SELECT event_type, COUNT(*) as count
        FROM event_log
        WHERE timestamp > ?
        GROUP BY event_type
        ORDER BY count DESC
        """,
        (cutoff,),
    ).fetchall()
    conn.close()

    summary: dict = {}
    for row in rows:
        summary[row["event_type"]] = row["count"]
    return summary
|
||||
|
||||
|
||||
def prune_events(older_than_days: int = 30) -> int:
    """Delete events older than the given number of days.

    Args:
        older_than_days: Age threshold; rows with older timestamps are removed.

    Returns:
        Number of events deleted
    """
    from datetime import timedelta

    cutoff = (datetime.now(timezone.utc) - timedelta(days=older_than_days)).isoformat()

    # Close the handle even if DELETE raises, so a failed prune does not
    # leak a sqlite connection (the original closed only on success).
    conn = _get_conn()
    try:
        cursor = conn.execute(
            "DELETE FROM event_log WHERE timestamp < ?",
            (cutoff,),
        )
        deleted = cursor.rowcount
        conn.commit()
    finally:
        conn.close()

    return deleted
|
||||
137
src/timmy/cascade_adapter.py
Normal file
137
src/timmy/cascade_adapter.py
Normal file
@@ -0,0 +1,137 @@
|
||||
"""Cascade Router adapter for Timmy agent.
|
||||
|
||||
Provides automatic failover between LLM providers with:
|
||||
- Circuit breaker pattern for failing providers
|
||||
- Metrics tracking per provider
|
||||
- Priority-based routing (local first, then APIs)
|
||||
"""
|
||||
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
from router.cascade import CascadeRouter
|
||||
from timmy.prompts import TIMMY_SYSTEM_PROMPT
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class TimmyResponse:
    """Response from Timmy via Cascade Router."""
    content: str                  # LLM-generated reply text
    provider_used: str            # name of the provider that served the request
    latency_ms: float             # wall-clock round-trip time in milliseconds
    fallback_used: bool = False   # True when the primary provider was not healthy
|
||||
|
||||
|
||||
class TimmyCascadeAdapter:
    """Adapter that routes Timmy requests through Cascade Router.

    Usage:
        adapter = TimmyCascadeAdapter()
        response = await adapter.chat("Hello")
        print(f"Response: {response.content}")
        print(f"Provider: {response.provider_used}")
    """

    def __init__(self, router: Optional[CascadeRouter] = None) -> None:
        """Initialize adapter with Cascade Router.

        Args:
            router: CascadeRouter instance. If None, creates default.
        """
        self.router = router or CascadeRouter()
        logger.info("TimmyCascadeAdapter initialized with %d providers",
                    len(self.router.providers))

    async def chat(self, message: str, context: Optional[str] = None) -> TimmyResponse:
        """Send message through cascade router with automatic failover.

        Args:
            message: User message
            context: Optional conversation context (injected as a system message)

        Returns:
            TimmyResponse with content and metadata

        Raises:
            Exception: re-raised from the router when every provider fails.
        """
        # Build messages array
        messages = []
        if context:
            messages.append({"role": "system", "content": context})
        messages.append({"role": "user", "content": message})

        # Route through cascade, timing the whole attempt (including any
        # failovers performed inside the router).
        import time
        start = time.time()

        try:
            result = await self.router.complete(
                messages=messages,
                system_prompt=TIMMY_SYSTEM_PROMPT,
            )

            latency = (time.time() - start) * 1000

            # Determine if fallback was used.  bool(...) fixes the original
            # expression `primary and primary.status.value != "healthy"`,
            # which evaluated to None (not a bool) when the provider list
            # was empty, leaking a non-bool into TimmyResponse.fallback_used.
            primary = self.router.providers[0] if self.router.providers else None
            fallback_used = bool(primary is not None and primary.status.value != "healthy")

            return TimmyResponse(
                content=result.content,
                provider_used=result.provider_name,
                latency_ms=latency,
                fallback_used=fallback_used,
            )

        except Exception as exc:
            logger.error("All providers failed: %s", exc)
            raise

    def get_provider_status(self) -> list[dict]:
        """Get status of all providers.

        Returns:
            List of provider status dicts, one per configured provider.
        """
        return [
            {
                "name": p.name,
                "type": p.type,
                "status": p.status.value,
                "circuit_state": p.circuit_state.value,
                "metrics": {
                    "total": p.metrics.total_requests,
                    "success": p.metrics.successful_requests,
                    "failed": p.metrics.failed_requests,
                    "avg_latency_ms": round(p.metrics.avg_latency_ms, 1),
                    "error_rate": round(p.metrics.error_rate, 3),
                },
                "priority": p.priority,
                "enabled": p.enabled,
            }
            for p in self.router.providers
        ]

    def get_preferred_provider(self) -> Optional[str]:
        """Get name of highest-priority healthy provider.

        Returns:
            Provider name or None if all unhealthy
        """
        # Assumes self.router.providers is ordered by priority — TODO confirm
        # against CascadeRouter; the first healthy, enabled provider wins.
        for provider in self.router.providers:
            if provider.status.value == "healthy" and provider.enabled:
                return provider.name
        return None
|
||||
|
||||
|
||||
# Process-wide adapter instance, created lazily by get_cascade_adapter().
_cascade_adapter: Optional[TimmyCascadeAdapter] = None


def get_cascade_adapter() -> TimmyCascadeAdapter:
    """Return the shared TimmyCascadeAdapter, constructing it on first call."""
    global _cascade_adapter
    adapter = _cascade_adapter
    if adapter is None:
        adapter = TimmyCascadeAdapter()
        _cascade_adapter = adapter
    return adapter
|
||||
@@ -5,6 +5,8 @@ The actual backend (mock or LND) is selected via LIGHTNING_BACKEND env var.
|
||||
|
||||
For backward compatibility, the PaymentHandler class and payment_handler
|
||||
singleton are preserved, but they delegate to the lightning backend.
|
||||
|
||||
All transactions are logged to the ledger for audit and accounting.
|
||||
"""
|
||||
|
||||
import logging
|
||||
@@ -13,6 +15,12 @@ from typing import Optional
|
||||
# Import from the new lightning module
|
||||
from lightning import get_backend, Invoice
|
||||
from lightning.base import LightningBackend
|
||||
from lightning.ledger import (
|
||||
create_invoice_entry,
|
||||
mark_settled,
|
||||
get_balance,
|
||||
list_transactions,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -42,22 +50,66 @@ class PaymentHandler:
|
||||
self._backend = backend or get_backend()
|
||||
logger.info("PaymentHandler initialized — backend: %s", self._backend.name)
|
||||
|
||||
def create_invoice(self, amount_sats: int, memo: str = "") -> Invoice:
|
||||
"""Create a new Lightning invoice."""
|
||||
def create_invoice(
    self,
    amount_sats: int,
    memo: str = "",
    source: str = "payment_handler",
    task_id: Optional[str] = None,
    agent_id: Optional[str] = None,
) -> Invoice:
    """Create a new Lightning invoice via the backend and record it in the ledger.

    Args:
        amount_sats: Invoice amount in satoshis
        memo: Payment description
        source: Component creating the invoice
        task_id: Associated task ID
        agent_id: Associated agent ID

    Returns:
        The Invoice produced by the configured backend.
    """
    invoice = self._backend.create_invoice(amount_sats, memo)
    logger.info(
        "Invoice created: %d sats — %s (hash: %s…)",
        amount_sats, memo, invoice.payment_hash[:12],
    )

    # Log to ledger for audit/accounting.  The hasattr guard tolerates
    # backends whose Invoice objects do not expose a raw bolt11 string.
    create_invoice_entry(
        payment_hash=invoice.payment_hash,
        amount_sats=amount_sats,
        memo=memo,
        invoice=invoice.bolt11 if hasattr(invoice, 'bolt11') else None,
        source=source,
        task_id=task_id,
        agent_id=agent_id,
    )

    return invoice
|
||||
|
||||
def check_payment(self, payment_hash: str) -> bool:
    """Check whether an invoice has been paid.

    If paid, updates the ledger entry.

    Args:
        payment_hash: Payment hash identifying the invoice.

    Returns:
        True when the backend reports the invoice as paid.
    """
    # NOTE(review): this span contained both the pre-ledger and ledger-aware
    # versions of the method interleaved (diff residue); resolved to the
    # ledger-aware implementation, which is the commit's intended state.
    is_paid = self._backend.check_payment(payment_hash)

    if is_paid:
        # Update ledger entry
        mark_settled(payment_hash)

    return is_paid
|
||||
|
||||
def settle_invoice(self, payment_hash: str, preimage: str) -> bool:
    """Manually settle an invoice with a preimage (for testing).

    Also updates the ledger entry.

    Args:
        payment_hash: Payment hash identifying the invoice.
        preimage: Preimage proving the payment.

    Returns:
        True when the backend accepted the settlement.
    """
    # NOTE(review): span interleaved the old delegate-only version with the
    # ledger-aware one (diff residue); resolved to the ledger-aware version.
    result = self._backend.settle_invoice(payment_hash, preimage)

    if result:
        mark_settled(payment_hash, preimage=preimage)

    return result
|
||||
|
||||
def get_invoice(self, payment_hash: str) -> Optional[Invoice]:
|
||||
"""Get invoice details by payment hash."""
|
||||
@@ -75,6 +127,26 @@ class PaymentHandler:
|
||||
def backend_name(self) -> str:
|
||||
"""Get the name of the current backend."""
|
||||
return self._backend.name
|
||||
|
||||
def get_balance(self) -> dict:
    """Get current balance summary from ledger.

    Returns:
        Dict with incoming, outgoing, pending, and available balances
    """
    # Delegates to the module-level ledger function of the same name
    # (name resolution inside the method body finds the module global,
    # not this method); the backend is not consulted.
    return get_balance()
|
||||
|
||||
def list_transactions(self, limit: int = 100, **filters) -> list:
    """List transactions from ledger.

    Args:
        limit: Maximum number of transactions
        **filters: Optional filters (tx_type, status, task_id, agent_id)

    Returns:
        List of LedgerEntry objects
    """
    # Thin delegation to the module-level ledger helper of the same name.
    return list_transactions(limit=limit, **filters)
|
||||
|
||||
|
||||
# Module-level singleton
|
||||
|
||||
331
src/upgrades/models.py
Normal file
331
src/upgrades/models.py
Normal file
@@ -0,0 +1,331 @@
|
||||
"""Database models for Self-Upgrade Approval Queue."""
|
||||
|
||||
import json
|
||||
import sqlite3
|
||||
import uuid
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime, timezone
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
DB_PATH = Path("data/swarm.db")
|
||||
|
||||
|
||||
class UpgradeStatus(str, Enum):
    """Status of an upgrade proposal.

    Transitions enforced by the model helpers in this module:
    proposed -> approved (approve_upgrade) or rejected (reject_upgrade);
    approved -> applied (mark_applied) or failed (mark_failed).
    EXPIRED is presumably set by a timeout sweep — TODO confirm where.
    """
    PROPOSED = "proposed"   # awaiting human review
    APPROVED = "approved"   # cleared to apply, not yet merged
    REJECTED = "rejected"   # declined by the reviewer
    APPLIED = "applied"     # merged successfully
    FAILED = "failed"       # an apply attempt errored out
    EXPIRED = "expired"     # aged out without a decision
|
||||
|
||||
|
||||
@dataclass
class Upgrade:
    """A self-modification upgrade proposal.

    Mirrors one row of the `upgrades` table (see _get_conn for the schema).
    """
    id: str = field(default_factory=lambda: str(uuid.uuid4()))
    status: UpgradeStatus = UpgradeStatus.PROPOSED

    # Timestamps (ISO-8601 UTC strings, set when the matching transition occurs)
    proposed_at: str = field(default_factory=lambda: datetime.now(timezone.utc).isoformat())
    approved_at: Optional[str] = None
    applied_at: Optional[str] = None
    rejected_at: Optional[str] = None

    # Proposal details
    branch_name: str = ""       # git branch holding the change
    description: str = ""       # human-readable summary for reviewers
    files_changed: list[str] = field(default_factory=list)  # paths touched
    diff_preview: str = ""      # truncated diff shown in the dashboard

    # Test results
    test_passed: bool = False
    test_output: str = ""

    # Execution results
    error_message: Optional[str] = None   # populated when apply fails
    approved_by: Optional[str] = None     # reviewer identity, for audit
|
||||
|
||||
|
||||
def _get_conn() -> sqlite3.Connection:
    """Open the swarm database, creating the upgrades schema on first use."""
    DB_PATH.parent.mkdir(parents=True, exist_ok=True)
    conn = sqlite3.connect(str(DB_PATH))
    conn.row_factory = sqlite3.Row

    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS upgrades (
            id TEXT PRIMARY KEY,
            status TEXT NOT NULL DEFAULT 'proposed',
            proposed_at TEXT NOT NULL,
            approved_at TEXT,
            applied_at TEXT,
            rejected_at TEXT,
            branch_name TEXT NOT NULL,
            description TEXT NOT NULL,
            files_changed TEXT, -- JSON array
            diff_preview TEXT,
            test_passed INTEGER DEFAULT 0,
            test_output TEXT,
            error_message TEXT,
            approved_by TEXT
        )
        """
    )

    # Indexes backing the common queue queries (filter by status, newest first).
    for statement in (
        "CREATE INDEX IF NOT EXISTS idx_upgrades_status ON upgrades(status)",
        "CREATE INDEX IF NOT EXISTS idx_upgrades_proposed ON upgrades(proposed_at)",
    ):
        conn.execute(statement)

    conn.commit()
    return conn
|
||||
|
||||
|
||||
def create_upgrade(
    branch_name: str,
    description: str,
    files_changed: list[str],
    diff_preview: str,
    test_passed: bool = False,
    test_output: str = "",
) -> Upgrade:
    """Create a new upgrade proposal.

    Args:
        branch_name: Git branch name for the upgrade
        description: Human-readable description
        files_changed: List of files that would be modified
        diff_preview: Short diff preview for review
        test_passed: Whether tests passed on the branch
        test_output: Test output text

    Returns:
        The created Upgrade
    """
    upgrade = Upgrade(
        branch_name=branch_name,
        description=description,
        files_changed=files_changed,
        diff_preview=diff_preview,
        test_passed=test_passed,
        test_output=test_output,
    )

    # Close the handle even if the INSERT raises (the original leaked the
    # connection on error).  All values are read from the dataclass so the
    # stored row always matches the returned object.
    conn = _get_conn()
    try:
        conn.execute(
            """
            INSERT INTO upgrades (id, status, proposed_at, branch_name, description,
                                  files_changed, diff_preview, test_passed, test_output)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                upgrade.id,
                upgrade.status.value,
                upgrade.proposed_at,
                upgrade.branch_name,
                upgrade.description,
                json.dumps(upgrade.files_changed),
                upgrade.diff_preview,
                int(upgrade.test_passed),
                upgrade.test_output,
            ),
        )
        conn.commit()
    finally:
        conn.close()

    return upgrade
|
||||
|
||||
|
||||
def get_upgrade(upgrade_id: str) -> Optional[Upgrade]:
    """Load a single upgrade row by primary key; None when missing."""
    conn = _get_conn()
    row = conn.execute(
        "SELECT * FROM upgrades WHERE id = ?", (upgrade_id,)
    ).fetchone()
    conn.close()

    if not row:
        return None

    # Columns that need normalisation from their SQLite representation.
    normalised = {
        "files_changed": json.loads(row["files_changed"]) if row["files_changed"] else [],
        "diff_preview": row["diff_preview"] or "",
        "test_passed": bool(row["test_passed"]),
        "test_output": row["test_output"] or "",
    }

    return Upgrade(
        id=row["id"],
        status=UpgradeStatus(row["status"]),
        proposed_at=row["proposed_at"],
        approved_at=row["approved_at"],
        applied_at=row["applied_at"],
        rejected_at=row["rejected_at"],
        branch_name=row["branch_name"],
        description=row["description"],
        error_message=row["error_message"],
        approved_by=row["approved_by"],
        **normalised,
    )
|
||||
|
||||
|
||||
def list_upgrades(
    status: Optional[UpgradeStatus] = None,
    limit: int = 100,
) -> list[Upgrade]:
    """List upgrades, newest proposals first, optionally filtered by status."""

    def _hydrate(r) -> Upgrade:
        # Rebuild a dataclass instance from one sqlite3.Row.
        return Upgrade(
            id=r["id"],
            status=UpgradeStatus(r["status"]),
            proposed_at=r["proposed_at"],
            approved_at=r["approved_at"],
            applied_at=r["applied_at"],
            rejected_at=r["rejected_at"],
            branch_name=r["branch_name"],
            description=r["description"],
            files_changed=json.loads(r["files_changed"]) if r["files_changed"] else [],
            diff_preview=r["diff_preview"] or "",
            test_passed=bool(r["test_passed"]),
            test_output=r["test_output"] or "",
            error_message=r["error_message"],
            approved_by=r["approved_by"],
        )

    if status is not None:
        query = "SELECT * FROM upgrades WHERE status = ? ORDER BY proposed_at DESC LIMIT ?"
        args = (status.value, limit)
    else:
        query = "SELECT * FROM upgrades ORDER BY proposed_at DESC LIMIT ?"
        args = (limit,)

    conn = _get_conn()
    rows = conn.execute(query, args).fetchall()
    conn.close()

    return [_hydrate(r) for r in rows]
|
||||
|
||||
|
||||
def approve_upgrade(upgrade_id: str, approved_by: str = "dashboard") -> Optional[Upgrade]:
    """Move a proposal from 'proposed' to 'approved'.

    Returns the updated Upgrade, or None when the row does not exist or is
    not currently in the 'proposed' state (the WHERE clause guards the
    transition atomically).
    """
    now = datetime.now(timezone.utc).isoformat()

    conn = _get_conn()
    cursor = conn.execute(
        """
        UPDATE upgrades
        SET status = ?, approved_at = ?, approved_by = ?
        WHERE id = ? AND status = ?
        """,
        (UpgradeStatus.APPROVED.value, now, approved_by, upgrade_id, UpgradeStatus.PROPOSED.value),
    )
    conn.commit()
    changed = cursor.rowcount
    conn.close()

    return get_upgrade(upgrade_id) if changed else None
|
||||
|
||||
|
||||
def reject_upgrade(upgrade_id: str) -> Optional[Upgrade]:
    """Move a proposal from 'proposed' to 'rejected'.

    Returns the updated Upgrade, or None when the row does not exist or is
    not currently in the 'proposed' state.
    """
    now = datetime.now(timezone.utc).isoformat()

    conn = _get_conn()
    cursor = conn.execute(
        """
        UPDATE upgrades
        SET status = ?, rejected_at = ?
        WHERE id = ? AND status = ?
        """,
        (UpgradeStatus.REJECTED.value, now, upgrade_id, UpgradeStatus.PROPOSED.value),
    )
    conn.commit()
    changed = cursor.rowcount
    conn.close()

    return get_upgrade(upgrade_id) if changed else None
|
||||
|
||||
|
||||
def mark_applied(upgrade_id: str) -> Optional[Upgrade]:
    """Move an upgrade from 'approved' to 'applied'.

    Returns the updated Upgrade, or None when the row does not exist or is
    not currently in the 'approved' state.
    """
    now = datetime.now(timezone.utc).isoformat()

    conn = _get_conn()
    cursor = conn.execute(
        """
        UPDATE upgrades
        SET status = ?, applied_at = ?
        WHERE id = ? AND status = ?
        """,
        (UpgradeStatus.APPLIED.value, now, upgrade_id, UpgradeStatus.APPROVED.value),
    )
    conn.commit()
    changed = cursor.rowcount
    conn.close()

    return get_upgrade(upgrade_id) if changed else None
|
||||
|
||||
|
||||
def mark_failed(upgrade_id: str, error_message: str) -> Optional[Upgrade]:
    """Move an upgrade from 'approved' to 'failed', recording the error.

    Returns the updated Upgrade, or None when the row does not exist or is
    not currently in the 'approved' state.
    """
    conn = _get_conn()
    cursor = conn.execute(
        """
        UPDATE upgrades
        SET status = ?, error_message = ?
        WHERE id = ? AND status = ?
        """,
        (UpgradeStatus.FAILED.value, error_message, upgrade_id, UpgradeStatus.APPROVED.value),
    )
    conn.commit()
    changed = cursor.rowcount
    conn.close()

    return get_upgrade(upgrade_id) if changed else None
|
||||
|
||||
|
||||
def get_pending_count() -> int:
    """Number of upgrades still awaiting a decision (status = 'proposed')."""
    conn = _get_conn()
    result = conn.execute(
        "SELECT COUNT(*) as count FROM upgrades WHERE status = ?",
        (UpgradeStatus.PROPOSED.value,),
    ).fetchone()
    conn.close()
    return result["count"]
|
||||
|
||||
|
||||
def prune_old_upgrades(older_than_days: int = 30) -> int:
    """Delete old completed upgrades.

    Only rows in a terminal state (applied/rejected/failed) are removed;
    pending proposals and approved-but-unapplied upgrades are kept.

    Args:
        older_than_days: Age threshold measured from proposed_at.

    Returns:
        Number of rows deleted.
    """
    from datetime import timedelta

    cutoff = (datetime.now(timezone.utc) - timedelta(days=older_than_days)).isoformat()

    # Derive the terminal statuses from the enum instead of hard-coded string
    # literals, so the values stay consistent with UpgradeStatus (the rest of
    # this module always goes through the enum).
    terminal = (
        UpgradeStatus.APPLIED.value,
        UpgradeStatus.REJECTED.value,
        UpgradeStatus.FAILED.value,
    )

    conn = _get_conn()
    try:
        cursor = conn.execute(
            """
            DELETE FROM upgrades
            WHERE proposed_at < ? AND status IN (?, ?, ?)
            """,
            (cutoff, *terminal),
        )
        deleted = cursor.rowcount
        conn.commit()
    finally:
        conn.close()

    return deleted
|
||||
285
src/upgrades/queue.py
Normal file
285
src/upgrades/queue.py
Normal file
@@ -0,0 +1,285 @@
|
||||
"""Upgrade Queue management - bridges self-modify loop with approval workflow."""
|
||||
|
||||
import logging
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from upgrades.models import (
|
||||
Upgrade,
|
||||
UpgradeStatus,
|
||||
create_upgrade,
|
||||
get_upgrade,
|
||||
approve_upgrade,
|
||||
reject_upgrade,
|
||||
mark_applied,
|
||||
mark_failed,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
PROJECT_ROOT = Path(__file__).parent.parent.parent
|
||||
|
||||
|
||||
class UpgradeQueue:
    """Manages the upgrade approval and application workflow.

    All methods are static: state lives in the `upgrades` table (see
    upgrades.models), not on instances.  Flow:
    propose() -> approve()/reject() -> apply().
    """

    @staticmethod
    def propose(
        branch_name: str,
        description: str,
        files_changed: list[str],
        diff_preview: str,
        test_passed: bool = False,
        test_output: str = "",
    ) -> Upgrade:
        """Propose a new upgrade for approval.

        This is called by the self-modify loop when it generates changes.
        The upgrade is created in 'proposed' state and waits for human approval.

        Args:
            branch_name: Git branch with the changes
            description: What the upgrade does
            files_changed: List of modified files
            diff_preview: Short diff for review
            test_passed: Whether tests passed
            test_output: Test output

        Returns:
            The created Upgrade proposal
        """
        upgrade = create_upgrade(
            branch_name=branch_name,
            description=description,
            files_changed=files_changed,
            diff_preview=diff_preview,
            test_passed=test_passed,
            test_output=test_output,
        )

        logger.info(
            "Upgrade proposed: %s (%s) - %d files",
            upgrade.id[:8],
            branch_name,
            len(files_changed),
        )

        # Log to event log.  Best-effort: the proposal is already persisted
        # above, so a missing/broken event log must not fail the call.
        try:
            from swarm.event_log import log_event, EventType
            log_event(
                EventType.SYSTEM_INFO,
                source="upgrade_queue",
                data={
                    "upgrade_id": upgrade.id,
                    "branch": branch_name,
                    "description": description,
                    "test_passed": test_passed,
                },
            )
        except Exception:
            pass

        return upgrade

    @staticmethod
    def approve(upgrade_id: str, approved_by: str = "dashboard") -> Optional[Upgrade]:
        """Approve an upgrade proposal.

        Called from dashboard when user clicks "Approve".
        Does NOT apply the upgrade - that happens separately.

        Args:
            upgrade_id: The upgrade to approve
            approved_by: Who approved it (for audit)

        Returns:
            Updated Upgrade or None if not found/not in proposed state
        """
        upgrade = approve_upgrade(upgrade_id, approved_by)

        if upgrade:
            logger.info("Upgrade approved: %s by %s", upgrade_id[:8], approved_by)

        return upgrade

    @staticmethod
    def reject(upgrade_id: str) -> Optional[Upgrade]:
        """Reject an upgrade proposal.

        Called from dashboard when user clicks "Reject".
        Cleans up the branch.

        Args:
            upgrade_id: The upgrade to reject

        Returns:
            Updated Upgrade or None
        """
        upgrade = reject_upgrade(upgrade_id)

        if upgrade:
            logger.info("Upgrade rejected: %s", upgrade_id[:8])

            # Clean up branch.  Force-delete (-D): a rejected branch is
            # unmerged by definition, so plain -d would refuse.
            try:
                subprocess.run(
                    ["git", "branch", "-D", upgrade.branch_name],
                    cwd=PROJECT_ROOT,
                    capture_output=True,
                    check=False,
                )
            except Exception as exc:
                logger.warning("Failed to delete branch %s: %s", upgrade.branch_name, exc)

        return upgrade

    @staticmethod
    def apply(upgrade_id: str) -> tuple[bool, str]:
        """Apply an approved upgrade.

        This is the critical operation that actually modifies the codebase:
        1. Checks out the branch
        2. Runs tests
        3. If tests pass: merges to main
        4. Updates upgrade status

        Args:
            upgrade_id: The approved upgrade to apply

        Returns:
            (success, message) tuple
        """
        upgrade = get_upgrade(upgrade_id)

        if not upgrade:
            return False, "Upgrade not found"

        # Only 'approved' upgrades may be applied (state machine guard).
        if upgrade.status != UpgradeStatus.APPROVED:
            return False, f"Upgrade not approved (status: {upgrade.status.value})"

        logger.info("Applying upgrade: %s (%s)", upgrade_id[:8], upgrade.branch_name)

        try:
            # 1. Checkout branch
            result = subprocess.run(
                ["git", "checkout", upgrade.branch_name],
                cwd=PROJECT_ROOT,
                capture_output=True,
                text=True,
            )
            if result.returncode != 0:
                mark_failed(upgrade_id, f"Checkout failed: {result.stderr}")
                return False, f"Failed to checkout branch: {result.stderr}"

            # 2. Run tests — re-run here even if the proposal claimed they
            # passed; this is the last gate before merging.
            result = subprocess.run(
                ["python", "-m", "pytest", "tests/", "-x", "-q"],
                cwd=PROJECT_ROOT,
                capture_output=True,
                text=True,
                timeout=120,
            )

            if result.returncode != 0:
                mark_failed(upgrade_id, f"Tests failed: {result.stdout}\n{result.stderr}")
                # Switch back to main
                subprocess.run(["git", "checkout", "main"], cwd=PROJECT_ROOT, check=False)
                return False, "Tests failed"

            # 3. Merge to main
            result = subprocess.run(
                ["git", "checkout", "main"],
                cwd=PROJECT_ROOT,
                capture_output=True,
                text=True,
            )
            if result.returncode != 0:
                mark_failed(upgrade_id, f"Failed to checkout main: {result.stderr}")
                return False, "Failed to checkout main"

            result = subprocess.run(
                ["git", "merge", "--no-ff", upgrade.branch_name, "-m", f"Apply upgrade: {upgrade.description}"],
                cwd=PROJECT_ROOT,
                capture_output=True,
                text=True,
            )
            if result.returncode != 0:
                mark_failed(upgrade_id, f"Merge failed: {result.stderr}")
                return False, "Merge failed"

            # 4. Mark as applied
            mark_applied(upgrade_id)

            # 5. Clean up branch (safe -d is fine: it was just merged)
            subprocess.run(
                ["git", "branch", "-d", upgrade.branch_name],
                cwd=PROJECT_ROOT,
                capture_output=True,
                check=False,
            )

            logger.info("Upgrade applied successfully: %s", upgrade_id[:8])
            return True, "Upgrade applied successfully"

        except subprocess.TimeoutExpired:
            mark_failed(upgrade_id, "Tests timed out")
            subprocess.run(["git", "checkout", "main"], cwd=PROJECT_ROOT, check=False)
            return False, "Tests timed out"

        except Exception as exc:
            error_msg = str(exc)
            mark_failed(upgrade_id, error_msg)
            subprocess.run(["git", "checkout", "main"], cwd=PROJECT_ROOT, check=False)
            return False, f"Error: {error_msg}"

    @staticmethod
    def get_full_diff(upgrade_id: str) -> str:
        """Get full git diff for an upgrade.

        Args:
            upgrade_id: The upgrade to get diff for

        Returns:
            Git diff output
        """
        upgrade = get_upgrade(upgrade_id)
        if not upgrade:
            return "Upgrade not found"

        try:
            # Three-dot diff: changes on the branch since it diverged from main.
            result = subprocess.run(
                ["git", "diff", "main..." + upgrade.branch_name],
                cwd=PROJECT_ROOT,
                capture_output=True,
                text=True,
            )
            return result.stdout if result.returncode == 0 else result.stderr
        except Exception as exc:
            return f"Error getting diff: {exc}"
|
||||
|
||||
|
||||
# Convenience functions for self-modify loop
|
||||
def propose_upgrade_from_loop(
    branch_name: str,
    description: str,
    files_changed: list[str],
    diff: str,
    test_output: str = "",
) -> Upgrade:
    """Called by self-modify loop to propose an upgrade.

    Tests are expected to have been run by the loop before calling this.

    Args:
        branch_name: Git branch with the changes
        description: What the upgrade does
        files_changed: List of modified files
        diff: Full diff text; only the first 2000 chars are stored as preview
        test_output: Captured test-runner output, used to infer pass/fail

    Returns:
        The created Upgrade proposal.
    """
    return UpgradeQueue.propose(
        branch_name=branch_name,
        description=description,
        files_changed=files_changed,
        diff_preview=diff[:2000],  # First 2000 chars
        test_passed=_infer_test_passed(test_output),
        test_output=test_output,
    )


def _infer_test_passed(test_output: str) -> bool:
    """Heuristically decide whether pytest-style output reports success.

    The original check (`"passed" in out.lower() or " PASSED " in out`) was
    wrong in two ways: the second clause was redundant (lower() already
    matches it), and outputs like "1 failed, 3 passed" were reported as
    success.  Require a pass marker AND no failure/error marker.
    """
    lowered = test_output.lower()
    return "passed" in lowered and "failed" not in lowered and "error" not in lowered
|
||||
@@ -119,6 +119,34 @@ class WebSocketManager:
|
||||
def connection_count(self) -> int:
    """Number of currently-attached WebSocket clients."""
    return len(self._connections)
|
||||
|
||||
async def broadcast_json(self, data: dict) -> int:
    """Broadcast raw JSON data to all connected clients.

    Args:
        data: Dictionary to send as JSON

    Returns:
        Number of clients notified
    """
    import json

    payload = json.dumps(data)
    dead = []
    delivered = 0

    for client in self._connections:
        try:
            await client.send_text(payload)
        except Exception:
            dead.append(client)
        else:
            delivered += 1

    # Drop any connection that failed to receive.
    for client in dead:
        self.disconnect(client)

    return delivered
||||
|
||||
@property
|
||||
def event_history(self) -> list[WSEvent]:
|
||||
return list(self._event_history)
|
||||
|
||||
Reference in New Issue
Block a user