feat: code quality audit + autoresearch integration + infra hardening (#150)

This commit is contained in:
Alexander Whitestone
2026-03-08 12:50:44 -04:00
committed by GitHub
parent fd0ede0d51
commit ae3bb1cc21
186 changed files with 5129 additions and 3289 deletions

View File

@@ -76,7 +76,10 @@ class SparkEngine:
return event_id
def on_bid_submitted(
self, task_id: str, agent_id: str, bid_sats: int,
self,
task_id: str,
agent_id: str,
bid_sats: int,
) -> Optional[str]:
"""Capture a bid event."""
if not self._enabled:
@@ -90,12 +93,13 @@ class SparkEngine:
data=json.dumps({"bid_sats": bid_sats}),
)
logger.debug("Spark: captured bid %s%s (%d sats)",
agent_id[:8], task_id[:8], bid_sats)
logger.debug("Spark: captured bid %s%s (%d sats)", agent_id[:8], task_id[:8], bid_sats)
return event_id
def on_task_assigned(
self, task_id: str, agent_id: str,
self,
task_id: str,
agent_id: str,
) -> Optional[str]:
"""Capture a task-assigned event."""
if not self._enabled:
@@ -108,8 +112,7 @@ class SparkEngine:
task_id=task_id,
)
logger.debug("Spark: captured assignment %s%s",
task_id[:8], agent_id[:8])
logger.debug("Spark: captured assignment %s%s", task_id[:8], agent_id[:8])
return event_id
def on_task_completed(
@@ -128,10 +131,12 @@ class SparkEngine:
description=f"Task completed by {agent_id[:8]}",
agent_id=agent_id,
task_id=task_id,
data=json.dumps({
"result_length": len(result),
"winning_bid": winning_bid,
}),
data=json.dumps(
{
"result_length": len(result),
"winning_bid": winning_bid,
}
),
)
# Evaluate EIDOS prediction
@@ -154,8 +159,7 @@ class SparkEngine:
# Consolidate memory if enough events for this agent
self._maybe_consolidate(agent_id)
logger.debug("Spark: captured completion %s by %s",
task_id[:8], agent_id[:8])
logger.debug("Spark: captured completion %s by %s", task_id[:8], agent_id[:8])
return event_id
def on_task_failed(
@@ -186,8 +190,7 @@ class SparkEngine:
# Failures always worth consolidating
self._maybe_consolidate(agent_id)
logger.debug("Spark: captured failure %s by %s",
task_id[:8], agent_id[:8])
logger.debug("Spark: captured failure %s by %s", task_id[:8], agent_id[:8])
return event_id
def on_agent_joined(self, agent_id: str, name: str) -> Optional[str]:
@@ -288,7 +291,7 @@ class SparkEngine:
memory_type="pattern",
subject=agent_id,
content=f"Agent {agent_id[:8]} has a strong track record: "
f"{len(completions)}/{total} tasks completed successfully.",
f"{len(completions)}/{total} tasks completed successfully.",
confidence=min(0.95, 0.6 + total * 0.05),
source_events=total,
)
@@ -297,7 +300,7 @@ class SparkEngine:
memory_type="anomaly",
subject=agent_id,
content=f"Agent {agent_id[:8]} is struggling: only "
f"{len(completions)}/{total} tasks completed.",
f"{len(completions)}/{total} tasks completed.",
confidence=min(0.95, 0.6 + total * 0.05),
source_events=total,
)
@@ -347,6 +350,7 @@ class SparkEngine:
def _create_engine() -> SparkEngine:
    """Build the module-level SparkEngine, honoring ``settings.spark_enabled``.

    Falls back to an enabled engine when the config module (or the
    ``spark_enabled`` flag) cannot be loaded, so importing this module
    never fails just because configuration is absent.

    Returns:
        A SparkEngine whose ``enabled`` flag mirrors config when available,
        otherwise ``True``.
    """
    try:
        # Local import: config may be missing/broken in some deployments,
        # and we only need this one flag — keep the try body minimal.
        from config import settings

        enabled = bool(settings.spark_enabled)
    except Exception:
        # Best-effort fallback kept (any config failure means "enabled"),
        # but record why instead of swallowing the error silently.
        logger.debug("Spark: config unavailable, defaulting to enabled", exc_info=True)
        enabled = True
    return SparkEngine(enabled=enabled)