Compare commits
1 Commits
mimo/creat
...
mimo/code/
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0a0a2eb802 |
6
app.js
6
app.js
@@ -1,4 +1,4 @@
|
|||||||
import ResonanceVisualizer from './nexus/components/resonance-visualizer.js';
import * as THREE from 'three';
|
import * as THREE from 'three';
|
||||||
import { EffectComposer } from 'three/addons/postprocessing/EffectComposer.js';
|
import { EffectComposer } from 'three/addons/postprocessing/EffectComposer.js';
|
||||||
import { RenderPass } from 'three/addons/postprocessing/RenderPass.js';
|
import { RenderPass } from 'three/addons/postprocessing/RenderPass.js';
|
||||||
import { UnrealBloomPass } from 'three/addons/postprocessing/UnrealBloomPass.js';
|
import { UnrealBloomPass } from 'three/addons/postprocessing/UnrealBloomPass.js';
|
||||||
@@ -597,7 +597,7 @@ class PSELayer {
|
|||||||
|
|
||||||
let pseLayer;
|
let pseLayer;
|
||||||
|
|
||||||
let resonanceViz, metaLayer, neuroBridge, cbr, symbolicPlanner, knowledgeGraph, blackboard, symbolicEngine, calibrator;
|
let metaLayer, neuroBridge, cbr, symbolicPlanner, knowledgeGraph, blackboard, symbolicEngine, calibrator;
|
||||||
let agentFSMs = {};
|
let agentFSMs = {};
|
||||||
|
|
||||||
function setupGOFAI() {
|
function setupGOFAI() {
|
||||||
@@ -666,7 +666,7 @@ async function init() {
|
|||||||
scene = new THREE.Scene();
|
scene = new THREE.Scene();
|
||||||
scene.fog = new THREE.FogExp2(0x050510, 0.012);
|
scene.fog = new THREE.FogExp2(0x050510, 0.012);
|
||||||
|
|
||||||
setupGOFAI();
resonanceViz = new ResonanceVisualizer(scene);
|
setupGOFAI();
|
||||||
camera = new THREE.PerspectiveCamera(65, window.innerWidth / window.innerHeight, 0.1, 1000);
|
camera = new THREE.PerspectiveCamera(65, window.innerWidth / window.innerHeight, 0.1, 1000);
|
||||||
camera.position.copy(playerPos);
|
camera.position.copy(playerPos);
|
||||||
|
|
||||||
|
|||||||
@@ -5,6 +5,10 @@ SQLite-backed store for lived experiences only. The model remembers
|
|||||||
what it perceived, what it thought, and what it did — nothing else.
|
what it perceived, what it thought, and what it did — nothing else.
|
||||||
|
|
||||||
Each row is one cycle of the perceive→think→act loop.
|
Each row is one cycle of the perceive→think→act loop.
|
||||||
|
|
||||||
|
Implements the GBrain "compiled truth + timeline" pattern (#1181):
|
||||||
|
- compiled_truths: current best understanding, rewritten when evidence changes
|
||||||
|
- experiences: append-only evidence trail that never gets edited
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import sqlite3
|
import sqlite3
|
||||||
@@ -51,6 +55,27 @@ class ExperienceStore:
|
|||||||
ON experiences(timestamp DESC);
|
ON experiences(timestamp DESC);
|
||||||
CREATE INDEX IF NOT EXISTS idx_exp_session
|
CREATE INDEX IF NOT EXISTS idx_exp_session
|
||||||
ON experiences(session_id);
|
ON experiences(session_id);
|
||||||
|
|
||||||
|
-- GBrain compiled truth pattern (#1181)
|
||||||
|
-- Current best understanding about an entity/topic.
|
||||||
|
-- Rewritten when new evidence changes the picture.
|
||||||
|
-- The timeline (experiences table) is the evidence trail — never edited.
|
||||||
|
CREATE TABLE IF NOT EXISTS compiled_truths (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
entity TEXT NOT NULL, -- what this truth is about (person, topic, project)
|
||||||
|
truth TEXT NOT NULL, -- current best understanding
|
||||||
|
confidence REAL DEFAULT 0.5, -- 0.0–1.0
|
||||||
|
source_exp_id INTEGER, -- last experience that updated this truth
|
||||||
|
created_at REAL NOT NULL,
|
||||||
|
updated_at REAL NOT NULL,
|
||||||
|
metadata_json TEXT DEFAULT '{}',
|
||||||
|
UNIQUE(entity) -- one compiled truth per entity
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_truth_entity
|
||||||
|
ON compiled_truths(entity);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_truth_updated
|
||||||
|
ON compiled_truths(updated_at DESC);
|
||||||
""")
|
""")
|
||||||
self.conn.commit()
|
self.conn.commit()
|
||||||
|
|
||||||
@@ -157,3 +182,117 @@ class ExperienceStore:
|
|||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
self.conn.close()
|
self.conn.close()
|
||||||
|
|
||||||
|
# ── GBrain compiled truth + timeline pattern (#1181) ────────────────
|
||||||
|
|
||||||
|
def upsert_compiled_truth(
    self,
    entity: str,
    truth: str,
    confidence: float = 0.5,
    source_exp_id: Optional[int] = None,
    metadata: Optional[dict] = None,
) -> int:
    """Create or update the compiled truth for an entity.

    This is the 'compiled truth on top' from the GBrain pattern:
    when new evidence changes our understanding we rewrite this one
    record, while the timeline (experiences table) keeps the evidence
    trail untouched.

    Args:
        entity: What this truth is about (person, topic, project).
        truth: Current best understanding.
        confidence: 0.0–1.0 confidence score.
        source_exp_id: Last experience ID that informed this truth.
        metadata: Optional extra data as a dict.

    Returns:
        The row ID of the compiled truth.
    """
    timestamp = time.time()
    # A missing/empty metadata dict is stored as the literal '{}' default.
    serialized = json.dumps(metadata) if metadata else "{}"
    params = (entity, truth, confidence, source_exp_id, timestamp, timestamp, serialized)

    self.conn.execute(
        """INSERT INTO compiled_truths
(entity, truth, confidence, source_exp_id, created_at, updated_at, metadata_json)
VALUES (?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(entity) DO UPDATE SET
truth = excluded.truth,
confidence = excluded.confidence,
source_exp_id = excluded.source_exp_id,
updated_at = excluded.updated_at,
metadata_json = excluded.metadata_json""",
        params,
    )
    self.conn.commit()

    # The UPSERT may have updated an existing row, so resolve the id
    # explicitly instead of relying on cursor.lastrowid.
    fetched = self.conn.execute(
        "SELECT id FROM compiled_truths WHERE entity = ?", (entity,)
    ).fetchone()
    return fetched[0]
|
||||||
|
|
||||||
|
def get_compiled_truth(self, entity: str) -> Optional[dict]:
    """Get the current compiled truth for an entity.

    Returns a dict of the row's fields with ``metadata_json`` decoded
    into ``metadata``, or ``None`` when no truth exists for ``entity``.
    """
    record = self.conn.execute(
        """SELECT id, entity, truth, confidence, source_exp_id,
created_at, updated_at, metadata_json
FROM compiled_truths WHERE entity = ?""",
        (entity,),
    ).fetchone()
    if record is None:
        return None

    columns = ("id", "entity", "truth", "confidence", "source_exp_id",
               "created_at", "updated_at")
    result = dict(zip(columns, record))
    result["metadata"] = json.loads(record[7]) if record[7] else {}
    return result
|
||||||
|
|
||||||
|
def get_all_compiled_truths(
    self, min_confidence: float = 0.0, limit: int = 100
) -> list[dict]:
    """Get all compiled truths, optionally filtered by minimum confidence.

    Rows are ordered newest-updated first and capped at ``limit``.
    """
    fetched = self.conn.execute(
        """SELECT id, entity, truth, confidence, source_exp_id,
created_at, updated_at, metadata_json
FROM compiled_truths
WHERE confidence >= ?
ORDER BY updated_at DESC
LIMIT ?""",
        (min_confidence, limit),
    ).fetchall()

    results = []
    for rid, ent, txt, conf, src, created, updated, meta in fetched:
        results.append({
            "id": rid, "entity": ent, "truth": txt,
            "confidence": conf, "source_exp_id": src,
            "created_at": created, "updated_at": updated,
            "metadata": json.loads(meta) if meta else {},
        })
    return results
|
||||||
|
|
||||||
|
def search_compiled_truths(self, query: str, limit: int = 10) -> list[dict]:
    """Search compiled truths by entity name or truth content (LIKE match).

    Matches are ordered by confidence, then recency, and capped at ``limit``.
    """
    pattern = f"%{query}%"
    fetched = self.conn.execute(
        """SELECT id, entity, truth, confidence, source_exp_id,
created_at, updated_at, metadata_json
FROM compiled_truths
WHERE entity LIKE ? OR truth LIKE ?
ORDER BY confidence DESC, updated_at DESC
LIMIT ?""",
        (pattern, pattern, limit),
    ).fetchall()

    hits = []
    for row in fetched:
        hits.append({
            "id": row[0], "entity": row[1], "truth": row[2],
            "confidence": row[3], "source_exp_id": row[4],
            "created_at": row[5], "updated_at": row[6],
            "metadata": json.loads(row[7]) if row[7] else {},
        })
    return hits
|
||||||
|
|||||||
@@ -1,22 +0,0 @@
|
|||||||
|
|
||||||
"""Resonance Linker — Finds second-degree connections in the holographic graph."""
|
|
||||||
|
|
||||||
class ResonanceLinker:
|
|
||||||
def __init__(self, archive):
|
|
||||||
self.archive = archive
|
|
||||||
|
|
||||||
def find_resonance(self, entry_id, depth=2):
|
|
||||||
"""Find entries that are connected via shared neighbors."""
|
|
||||||
if entry_id not in self.archive._entries: return []
|
|
||||||
|
|
||||||
entry = self.archive._entries[entry_id]
|
|
||||||
neighbors = set(entry.links)
|
|
||||||
resonance = {}
|
|
||||||
|
|
||||||
for neighbor_id in neighbors:
|
|
||||||
if neighbor_id in self.archive._entries:
|
|
||||||
for second_neighbor in self.archive._entries[neighbor_id].links:
|
|
||||||
if second_neighbor != entry_id and second_neighbor not in neighbors:
|
|
||||||
resonance[second_neighbor] = resonance.get(second_neighbor, 0) + 1
|
|
||||||
|
|
||||||
return sorted(resonance.items(), key=lambda x: x[1], reverse=True)
|
|
||||||
17
server.py
17
server.py
@@ -52,20 +52,19 @@ async def broadcast_handler(websocket: websockets.WebSocketServerProtocol):
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
disconnected = set()
|
disconnected = set()
|
||||||
# Create broadcast tasks, tracking which client each task targets
|
# Create broadcast tasks for efficiency
|
||||||
task_client_pairs = []
|
tasks = []
|
||||||
for client in clients:
|
for client in clients:
|
||||||
if client != websocket and client.open:
|
if client != websocket and client.open:
|
||||||
task = asyncio.create_task(client.send(message))
|
tasks.append(asyncio.create_task(client.send(message)))
|
||||||
task_client_pairs.append((task, client))
|
|
||||||
|
if tasks:
|
||||||
if task_client_pairs:
|
|
||||||
tasks = [pair[0] for pair in task_client_pairs]
|
|
||||||
results = await asyncio.gather(*tasks, return_exceptions=True)
|
results = await asyncio.gather(*tasks, return_exceptions=True)
|
||||||
for i, result in enumerate(results):
|
for i, result in enumerate(results):
|
||||||
if isinstance(result, Exception):
|
if isinstance(result, Exception):
|
||||||
target_client = task_client_pairs[i][1]
|
# Find the client that failed
|
||||||
logger.error(f"Failed to send to client {target_client.remote_address}: {result}")
|
target_client = [c for c in clients if c != websocket][i]
|
||||||
|
logger.error(f"Failed to send to a client {target_client.remote_address}: {result}")
|
||||||
disconnected.add(target_client)
|
disconnected.add(target_client)
|
||||||
|
|
||||||
if disconnected:
|
if disconnected:
|
||||||
|
|||||||
Reference in New Issue
Block a user