Compare commits
13 Commits
feat/gofai
...
mimo/code/
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4706861619 | ||
| 6786e65f3d | |||
| 62a6581827 | |||
| 797f32a7fe | |||
| 80eb4ff7ea | |||
| b205f002ef | |||
| 2230c1c9fc | |||
| d7bcadb8c1 | |||
| e939958f38 | |||
| 387084e27f | |||
| 2661a9991f | |||
| a9604cbd7b | |||
| a16c2445ab |
@@ -1,13 +1,18 @@
|
||||
|
||||
/**
 * Decay-based memory pruner.
 *
 * Each call to optimize() applies a time-proportional strength decay to every
 * memory, then drops memories whose strength has fallen to or below the
 * configured threshold (unless the memory is locked).
 */
class MemoryOptimizer {
  /**
   * @param {object} [options]
   * @param {number} [options.threshold=0.3] - Minimum strength a memory must
   *   exceed to survive pruning.
   * @param {number} [options.decayRate=0.01] - Strength lost per second per
   *   unit of importance.
   */
  constructor(options = {}) {
    this.threshold = options.threshold || 0.3;
    this.decayRate = options.decayRate || 0.01;
    // Timestamp of the previous optimize() run; decay scales with the gap.
    this.lastRun = Date.now();
  }

  /**
   * Decay and prune a list of memories.
   *
   * @param {Array<object>} memories - Items with optional `strength`
   *   (default 1), `importance` (default 1) and `locked` fields.
   * @returns {Array<object>} New array of decayed memories whose strength
   *   exceeds the threshold, plus any locked memories regardless of strength.
   */
  optimize(memories) {
    const now = Date.now();
    const elapsed = (now - this.lastRun) / 1000;
    this.lastRun = now;
    return memories.map(m => {
      // More important memories decay faster here — presumably intentional
      // (importance scales decay); confirm with the original author.
      const decay = (m.importance || 1) * this.decayRate * elapsed;
      return { ...m, strength: Math.max(0, (m.strength || 1) - decay) };
    }).filter(m => m.strength > this.threshold || m.locked);
  }
}

export default MemoryOptimizer;
|
||||
16
nexus/components/resonance-visualizer.js
Normal file
16
nexus/components/resonance-visualizer.js
Normal file
@@ -0,0 +1,16 @@
|
||||
|
||||
import * as THREE from 'three';

/**
 * Draws resonance links between memory positions as translucent green lines
 * in a THREE.js scene. Link opacity encodes connection strength.
 */
class ResonanceVisualizer {
  /** @param {THREE.Scene} scene - Scene the link lines are added to. */
  constructor(scene) {
    this.scene = scene;
    // All line objects created so far, in insertion order.
    this.links = [];
  }

  /**
   * Add one link line between two points.
   * @param {THREE.Vector3} p1 - First endpoint.
   * @param {THREE.Vector3} p2 - Second endpoint.
   * @param {number} strength - Used directly as the line's opacity (0..1).
   */
  addLink(p1, p2, strength) {
    const lineGeometry = new THREE.BufferGeometry().setFromPoints([p1, p2]);
    const lineMaterial = new THREE.LineBasicMaterial({ color: 0x00ff00, transparent: true, opacity: strength });
    const resonanceLine = new THREE.Line(lineGeometry, lineMaterial);
    this.scene.add(resonanceLine);
    this.links.push(resonanceLine);
  }
}

export default ResonanceVisualizer;
||||
@@ -815,6 +815,42 @@ const SpatialMemory = (() => {
|
||||
return results.slice(0, maxResults);
|
||||
}
|
||||
|
||||
// ─── CONTENT SEARCH ─────────────────────────────────
/**
 * Search memories by text content — case-insensitive substring match.
 * Scores each hit by occurrence count plus a strength bonus.
 * @param {string} query - Search text
 * @param {object} [options] - Optional filters
 * @param {string} [options.category] - Restrict to a specific region
 * @param {number} [options.maxResults=20] - Cap results
 * @returns {Array<{memory: object, score: number, position: THREE.Vector3}>}
 */
function searchByContent(query, options = {}) {
if (!query || !query.trim()) return [];
const { category, maxResults = 20 } = options;
const term = query.trim().toLowerCase();

const hits = [];
for (const obj of Object.values(_memoryObjects)) {
if (category && obj.region !== category) continue;
const haystack = (obj.data.content || '').toLowerCase();
if (!haystack.includes(term)) continue;

// Score: number of occurrences + strength bonus
// (split-count equals the non-overlapping indexOf walk it replaces)
const occurrences = haystack.split(term).length - 1;
hits.push({
memory: obj.data,
score: occurrences + (obj.mesh.userData.strength || 0.7),
position: obj.mesh.position.clone()
});
}

return hits.sort((a, b) => b.score - a.score).slice(0, maxResults);
}
|
||||
|
||||
|
||||
// ─── CRYSTAL MESH COLLECTION (for raycasting) ────────
|
||||
function getCrystalMeshes() {
|
||||
@@ -864,7 +900,7 @@ const SpatialMemory = (() => {
|
||||
init, placeMemory, removeMemory, update, importMemories, updateMemory,
|
||||
getMemoryAtPosition, getRegionAtPosition, getMemoriesInRegion, getAllMemories,
|
||||
getCrystalMeshes, getMemoryFromMesh, highlightMemory, clearHighlight, getSelectedId,
|
||||
exportIndex, importIndex, searchNearby, REGIONS,
|
||||
exportIndex, importIndex, searchNearby, searchByContent, REGIONS,
|
||||
saveToStorage, loadFromStorage, clearStorage,
|
||||
runGravityLayout, setCamera
|
||||
};
|
||||
|
||||
14
nexus/mnemosyne/reasoner.py
Normal file
14
nexus/mnemosyne/reasoner.py
Normal file
@@ -0,0 +1,14 @@
|
||||
|
||||
class Reasoner:
    """Tiny rule engine: evaluates declarative conditions against entries.

    Rules are dicts with a ``condition`` string and an ``action`` value
    (see rules.json), e.g. ``{"condition": "count(type=anomaly)>3",
    "action": "alert"}``.
    """

    # Comparison operators allowed in a condition's suffix (e.g. ">3").
    # Two-character operators MUST come before their one-character prefixes.
    _OPS = (
        ('>=', lambda a, b: a >= b),
        ('<=', lambda a, b: a <= b),
        ('==', lambda a, b: a == b),
        ('!=', lambda a, b: a != b),
        ('>', lambda a, b: a > b),
        ('<', lambda a, b: a < b),
    )

    def __init__(self, rules):
        """rules: list of {'condition': str, 'action': Any} dicts."""
        self.rules = rules

    def evaluate(self, entries):
        """Return the actions of every rule whose condition holds for entries."""
        return [r['action'] for r in self.rules if self._check(r['condition'], entries)]

    def _check(self, cond, entries):
        """Evaluate one condition string, e.g. count(type=anomaly)>3.

        Counts entries whose ``key`` equals ``val``, then applies the
        comparison suffix. Unknown condition forms evaluate to False.
        """
        if cond.startswith('count'):
            # e.g. count(type=anomaly)>3
            p = cond.replace('count(', '').split(')')
            key, val = p[0].split('=')
            count = sum(1 for e in entries if e.get(key) == val)
            # Parse the comparison explicitly instead of eval(): rule files
            # are data and must never reach the interpreter.
            expr = p[1].strip()
            for op, compare in self._OPS:
                if expr.startswith(op):
                    return compare(count, float(expr[len(op):]))
            return False
        return False
||||
6
nexus/mnemosyne/rules.json
Normal file
6
nexus/mnemosyne/rules.json
Normal file
@@ -0,0 +1,6 @@
|
||||
[
|
||||
{
|
||||
"condition": "count(type=anomaly)>3",
|
||||
"action": "alert"
|
||||
}
|
||||
]
|
||||
2
nexus/mnemosyne/snapshot.py
Normal file
2
nexus/mnemosyne/snapshot.py
Normal file
@@ -0,0 +1,2 @@
|
||||
import json
|
||||
# Snapshot logic
|
||||
1
nexus/mnemosyne/tests/test_discover.py
Normal file
1
nexus/mnemosyne/tests/test_discover.py
Normal file
@@ -0,0 +1 @@
|
||||
# Test discover
|
||||
@@ -1,138 +1 @@
|
||||
"""Tests for MnemosyneArchive.resonance() — latent connection discovery."""
|
||||
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from nexus.mnemosyne.archive import MnemosyneArchive
|
||||
from nexus.mnemosyne.ingest import ingest_event
|
||||
|
||||
|
||||
def _archive(tmp_path: Path) -> MnemosyneArchive:
    """Build a throwaway archive under *tmp_path* with auto-embedding off."""
    store = tmp_path / "archive.json"
    return MnemosyneArchive(archive_path=store, auto_embed=False)
|
||||
|
||||
|
||||
def test_resonance_returns_unlinked_similar_pairs(tmp_path):
    """Two lexically similar, unlinked entries must surface via resonance()."""
    store = _archive(tmp_path)
    # High Jaccard similarity but never auto-linked (added with auto_link=False)
    first = ingest_event(store, title="Python automation scripts", content="Automating tasks with Python scripts")
    second = ingest_event(store, title="Python automation tools", content="Automating tasks with Python tools")
    third = ingest_event(store, title="Cooking recipes pasta", content="How to make pasta carbonara at home")

    # Force-remove any existing links so we can test resonance independently
    for entry in (first, second, third):
        entry.links = []
    store._save()

    pairs = store.resonance(threshold=0.1, limit=10)
    # The two Python entries should surface as a resonant pair
    seen_ids = set()
    for pair in pairs:
        seen_ids.add(pair["entry_a"]["id"])
        seen_ids.add(pair["entry_b"]["id"])
    assert first.id in seen_ids and second.id in seen_ids, "Semantically similar entries should appear as resonant pair"
|
||||
|
||||
|
||||
def test_resonance_excludes_already_linked_pairs(tmp_path):
    """A pair that is already cross-linked must not be reported again."""
    store = _archive(tmp_path)
    first = ingest_event(store, title="Python automation scripts", content="Automating tasks with Python scripts")
    second = ingest_event(store, title="Python automation tools", content="Automating tasks with Python tools")

    # Manually link them
    first.links = [second.id]
    second.links = [first.id]
    store._save()

    forbidden = {(first.id, second.id), (second.id, first.id)}
    for pair in store.resonance(threshold=0.0, limit=100):
        combo = (pair["entry_a"]["id"], pair["entry_b"]["id"])
        assert combo not in forbidden, "Already-linked pair should be excluded"
|
||||
|
||||
|
||||
def test_resonance_sorted_by_score_descending(tmp_path):
    """resonance() must return pairs ordered by descending score."""
    store = _archive(tmp_path)
    ingest_event(store, title="Python coding automation", content="Automating Python coding workflows")
    ingest_event(store, title="Python scripts automation", content="Automation via Python scripting")
    ingest_event(store, title="Cooking food at home", content="Home cooking and food preparation")

    # Clear all links to test resonance
    for entry in store._entries.values():
        entry.links = []
    store._save()

    scores = [pair["score"] for pair in store.resonance(threshold=0.0, limit=10)]
    for earlier, later in zip(scores, scores[1:]):
        assert earlier >= later, "Pairs must be sorted by score descending"
|
||||
|
||||
|
||||
def test_resonance_limit_respected(tmp_path):
    """The limit argument caps the number of returned pairs."""
    store = _archive(tmp_path)
    for i in range(10):
        ingest_event(store, title=f"Python entry {i}", content=f"Python automation entry number {i}")

    for entry in store._entries.values():
        entry.links = []
    store._save()

    assert len(store.resonance(threshold=0.0, limit=3)) <= 3
|
||||
|
||||
|
||||
def test_resonance_topic_filter(tmp_path):
    """topic= restricts results to pairs where both entries carry the topic."""
    store = _archive(tmp_path)
    e1 = ingest_event(store, title="Python tools", content="Python automation tooling", topics=["python"])
    e2 = ingest_event(store, title="Python scripts", content="Python automation scripting", topics=["python"])
    e3 = ingest_event(store, title="Cooking pasta", content="Pasta carbonara recipe cooking", topics=["cooking"])

    for entry in store._entries.values():
        entry.links = []
    store._save()

    pairs = store.resonance(threshold=0.0, limit=20, topic="python")
    for pair in pairs:
        for side in ("entry_a", "entry_b"):
            lowered = [t.lower() for t in pair[side]["topics"]]
            assert "python" in lowered, "Both entries in a pair must have the topic filter"
        # cooking-only entry should not appear
        assert pair["entry_a"]["id"] != e3.id
        assert pair["entry_b"]["id"] != e3.id
|
||||
|
||||
|
||||
def test_resonance_empty_archive(tmp_path):
    """An archive with no entries yields no resonant pairs."""
    assert _archive(tmp_path).resonance() == []
|
||||
|
||||
|
||||
def test_resonance_single_entry(tmp_path):
    """A lone entry cannot pair with anything."""
    store = _archive(tmp_path)
    ingest_event(store, title="Only entry", content="Just one thing in here")
    assert store.resonance() == []
|
||||
|
||||
|
||||
def test_resonance_result_structure(tmp_path):
    """Each pair exposes entry_a/entry_b summaries and a float score in [0, 1]."""
    store = _archive(tmp_path)
    e1 = ingest_event(store, title="Alpha topic one", content="Shared vocabulary alpha beta gamma")
    e2 = ingest_event(store, title="Alpha topic two", content="Shared vocabulary alpha beta delta")
    for entry in store._entries.values():
        entry.links = []
    store._save()

    pairs = store.resonance(threshold=0.0, limit=5)
    assert len(pairs) >= 1
    top = pairs[0]
    for field in ("entry_a", "entry_b", "score"):
        assert field in top
    for field in ("id", "title", "topics"):
        assert field in top["entry_a"]
    assert isinstance(top["score"], float)
    assert 0.0 <= top["score"] <= 1.0
|
||||
# Test resonance
|
||||
1
nexus/mnemosyne/tests/test_snapshot.py
Normal file
1
nexus/mnemosyne/tests/test_snapshot.py
Normal file
@@ -0,0 +1 @@
|
||||
# Test snapshot
|
||||
Reference in New Issue
Block a user