"""Sovereign Knowledge Ingester for Hermes Agent.

Uses Gemini 3.1 Pro to learn from Google Search in real-time and
persists the knowledge to Timmy's sovereign memory (both Markdown and Symbolic).
"""
import base64
import logging
from typing import Any, Dict, List, Optional

from agent.gemini_adapter import GeminiAdapter
from agent.symbolic_memory import SymbolicMemory
from tools.gitea_client import GiteaClient

# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)
class KnowledgeIngester:
    """Learns about a topic via grounded Gemini search and persists the result.

    Knowledge is stored twice: as a Markdown file in Timmy's Gitea config
    repository, and as symbolic triples ingested into the symbolic memory
    graph.
    """

    def __init__(self):
        # Collaborators: LLM access, Gitea persistence, and the symbolic
        # knowledge graph. All use their own default configuration.
        self.adapter = GeminiAdapter()
        self.gitea = GiteaClient()
        self.symbolic = SymbolicMemory()

    def learn_about(self, topic: str) -> str:
        """Search Google for *topic*, synthesize findings, and save them.

        Args:
            topic: Free-text topic to research; also used (lowercased,
                spaces -> underscores) to derive the Markdown filename.

        Returns:
            A human-readable status string. Persistence failures are
            reported in the returned string (with the knowledge fragment
            appended) rather than raised, so the caller always gets the
            learned content.
        """
        # Lazy %-style args so formatting is skipped if INFO is disabled.
        logger.info("Learning about: %s", topic)

        # 1. Search and analyze. grounding=True lets the model pull in
        #    live Google Search results; thinking=True enables extended
        #    reasoning before it answers.
        prompt = f"""
        Please perform a deep dive into the following topic: {topic}

        Use Google Search to find the most recent and relevant information.
        Analyze the findings and provide a structured 'Knowledge Fragment' in Markdown format.
        Include:
        - Summary of the topic
        - Key facts and recent developments
        - Implications for Timmy's sovereign mission
        - References (URLs)
        """
        result = self.adapter.generate(
            model="gemini-3.1-pro-preview",
            prompt=prompt,
            system_instruction="You are Timmy's Sovereign Knowledge Ingester. Your goal is to find and synthesize high-fidelity information from Google Search.",
            grounding=True,
            thinking=True,
        )

        knowledge_fragment = result["text"]

        # 2. Extract symbolic triples into the knowledge graph.
        self.symbolic.ingest_text(knowledge_fragment)

        # 3. Persist to Timmy's memory as Markdown in the config repo.
        repo = "Timmy_Foundation/timmy-config"
        filename = f"memories/realtime_learning/{topic.lower().replace(' ', '_')}.md"

        try:
            # Best-effort lookup of an existing file: a missing file just
            # means we create it fresh, so the failure is deliberately
            # swallowed (narrowed from a bare `except:` so that
            # KeyboardInterrupt/SystemExit still propagate).
            sha = None
            try:
                existing = self.gitea.get_file(repo, filename)
                sha = existing.get("sha")
            except Exception:
                logger.debug("No existing memory file at %s; will create it", filename)

            # Gitea's content API expects base64-encoded file bodies.
            content_b64 = base64.b64encode(knowledge_fragment.encode()).decode()

            if sha:
                self.gitea.update_file(repo, filename, content_b64, f"Update knowledge on {topic}", sha)
            else:
                self.gitea.create_file(repo, filename, content_b64, f"Initial knowledge on {topic}")

            return f"Successfully learned about {topic}. Updated Timmy's Markdown memory and Symbolic Knowledge Graph."
        except Exception as e:
            # logger.exception records the full traceback, unlike .error.
            logger.exception("Failed to persist knowledge")
            # Degrade gracefully: return the fragment so it isn't lost.
            return f"Learned about {topic}, but failed to save to Markdown memory: {e}\n\n{knowledge_fragment}"
|