"""Phase 7: Long-Context Memory Compression.
|
|
|
|
Compresses years of session transcripts into a hierarchical, searchable "Life Log".
|
|
"""
|
|
|
|
import logging
import json
from typing import List, Dict, Any

from agent.gemini_adapter import GeminiAdapter
from agent.symbolic_memory import SymbolicMemory

logger = logging.getLogger(__name__)
class MemoryCompressor:
|
|
def __init__(self):
|
|
self.adapter = GeminiAdapter()
|
|
self.symbolic = SymbolicMemory()
|
|
|
|
def compress_transcripts(self, transcripts: str) -> Dict[str, Any]:
|
|
"""Compresses massive transcripts into a hierarchical memory map."""
|
|
logger.info("Compressing transcripts into hierarchical memory map.")
|
|
|
|
prompt = f"""
|
|
The following are session transcripts spanning a long period:
|
|
{transcripts}
|
|
|
|
Please perform a deep, recursive summarization of these transcripts.
|
|
Identify key themes, major decisions, evolving preferences, and significant events.
|
|
Create a hierarchical 'Life Log' map and extract high-fidelity symbolic triples for the Knowledge Graph.
|
|
|
|
Format the output as JSON:
|
|
{{
|
|
"summary": "...",
|
|
"hierarchy": {{...}},
|
|
"triples": [{{"s": "subject", "p": "predicate", "o": "object"}}]
|
|
}}
|
|
"""
|
|
result = self.adapter.generate(
|
|
model="gemini-3.1-pro-preview",
|
|
prompt=prompt,
|
|
system_instruction="You are Timmy's Memory Compressor. Your goal is to turn massive context into structured, searchable wisdom.",
|
|
thinking=True,
|
|
response_mime_type="application/json"
|
|
)
|
|
|
|
memory_data = json.loads(result["text"])
|
|
self.symbolic.ingest_text(json.dumps(memory_data["triples"]))
|
|
logger.info(f"Ingested {len(memory_data['triples'])} new memory triples.")
|
|
return memory_data
|