fix: dedup memory consolidation with existing memory search (#105)

_maybe_consolidate() now checks get_memories(subject=agent_id)
before storing. Skips if a memory of the same type (pattern/anomaly)
was created within the last hour. Prevents duplicate consolidation
entries on repeated task completion/failure events.

Also restructured branching: neutral success rates (strictly between
0.3 and 0.8) now return early instead of falling through.

9 new tests. 1465 total passing.
This commit is contained in:
2026-03-14 20:04:18 -04:00
parent 825f9e6bb4
commit b12e29b92e
2 changed files with 289 additions and 1 deletions

View File

@@ -273,6 +273,8 @@ class SparkEngine:
def _maybe_consolidate(self, agent_id: str) -> None:
"""Consolidate events into memories when enough data exists."""
from datetime import UTC, datetime, timedelta
agent_events = spark_memory.get_events(agent_id=agent_id, limit=50)
if len(agent_events) < 5:
return
@@ -286,7 +288,34 @@ class SparkEngine:
success_rate = len(completions) / total if total else 0
# Determine target memory type based on success rate
if success_rate >= 0.8:
target_memory_type = "pattern"
elif success_rate <= 0.3:
target_memory_type = "anomaly"
else:
return # No consolidation needed for neutral success rates
# Check for recent memories of the same type for this agent
existing_memories = spark_memory.get_memories(subject=agent_id, limit=5)
now = datetime.now(UTC)
one_hour_ago = now - timedelta(hours=1)
for memory in existing_memories:
if memory.memory_type == target_memory_type:
try:
created_at = datetime.fromisoformat(memory.created_at)
if created_at >= one_hour_ago:
logger.info(
"Consolidation: skipping — recent memory exists for %s",
agent_id[:8],
)
return
except (ValueError, TypeError):
continue
# Store the new memory
if target_memory_type == "pattern":
spark_memory.store_memory(
memory_type="pattern",
subject=agent_id,
@@ -295,7 +324,7 @@ class SparkEngine:
confidence=min(0.95, 0.6 + total * 0.05),
source_events=total,
)
elif success_rate <= 0.3:
else: # anomaly
spark_memory.store_memory(
memory_type="anomaly",
subject=agent_id,