Compare commits

...

7 Commits

Author SHA1 Message Date
b28b9163ee Merge CLI
Some checks failed
CI / test (pull_request) Failing after 10s
CI / validate (pull_request) Failing after 17s
Review Approval Gate / verify-review (pull_request) Failing after 3s
2026-04-12 12:15:37 +00:00
fdbb4e7b5c Merge #1271, #1273
Some checks failed
CI / test (pull_request) Failing after 11s
CI / validate (pull_request) Failing after 17s
Review Approval Gate / verify-review (pull_request) Failing after 3s
2026-04-12 12:15:35 +00:00
14c431190b Merge #1269 2026-04-12 12:15:33 +00:00
ccde99e749 docs(mnemosyne): add memory_resonance to FEATURES.yaml
Some checks failed
CI / test (pull_request) Failing after 9s
CI / validate (pull_request) Failing after 13s
Review Approval Gate / verify-review (pull_request) Failing after 3s
2026-04-12 11:15:52 +00:00
09b5ea24f4 test(mnemosyne): add resonance latent connection tests 2026-04-12 11:15:27 +00:00
1eb1ec69e9 feat(mnemosyne): add resonance CLI command 2026-04-12 11:14:32 +00:00
30fcc00067 feat(mnemosyne): add resonance() — discover latent connections between entries
Closes #1272
2026-04-12 11:14:14 +00:00
5 changed files with 270 additions and 1 deletions

View File

@@ -197,6 +197,18 @@ planned:
merged_prs:
- "#TBD"
memory_resonance:
status: planned
files: [archive.py, cli.py, tests/test_resonance.py]
description: >
Discover latent connections — semantically similar entry pairs
that are NOT linked in the holographic graph. Surfaces hidden
thematic patterns and potential missing links.
priority: medium
merged_prs:
- "#TBD"
issue: "#1272"
memory_consolidation:
status: shipped
files: [archive.py, cli.py, tests/test_consolidation.py]

View File

@@ -14,6 +14,12 @@ from nexus.mnemosyne.archive import MnemosyneArchive
from nexus.mnemosyne.entry import ArchiveEntry
from nexus.mnemosyne.linker import HolographicLinker
from nexus.mnemosyne.ingest import ingest_from_mempalace, ingest_event
from nexus.mnemosyne.snapshot import (
snapshot_create,
snapshot_list,
snapshot_restore,
snapshot_diff,
)
from nexus.mnemosyne.embeddings import (
EmbeddingBackend,
OllamaEmbeddingBackend,
@@ -31,4 +37,8 @@ __all__ = [
"OllamaEmbeddingBackend",
"TfidfEmbeddingBackend",
"get_embedding_backend",
"snapshot_create",
"snapshot_list",
"snapshot_restore",
"snapshot_diff",
]

View File

@@ -1274,6 +1274,72 @@ class MnemosyneArchive:
"unchanged": unchanged,
}
def resonance(
self,
threshold: float = 0.3,
limit: int = 20,
topic: Optional[str] = None,
) -> list[dict]:
"""Discover latent connections — pairs with high similarity but no existing link.
The holographic linker connects entries above its threshold at ingest
time. ``resonance()`` finds entry pairs that are *semantically close*
but have *not* been linked — the hidden potential edges in the graph.
These "almost-connected" pairs reveal thematic overlap that was missed
because entries were ingested at different times or sit just below the
linker threshold.
Args:
threshold: Minimum similarity score to surface a pair (default 0.3).
Pairs already linked are excluded regardless of score.
limit: Maximum number of pairs to return (default 20).
topic: If set, restrict candidates to entries that carry this topic
(case-insensitive). Both entries in a pair must match.
Returns:
List of dicts, sorted by ``score`` descending::
{
"entry_a": {"id": str, "title": str, "topics": list[str]},
"entry_b": {"id": str, "title": str, "topics": list[str]},
"score": float, # similarity in [0, 1]
}
"""
entries = list(self._entries.values())
if topic:
topic_lower = topic.lower()
entries = [e for e in entries if topic_lower in [t.lower() for t in e.topics]]
results: list[dict] = []
for i, entry_a in enumerate(entries):
for entry_b in entries[i + 1:]:
# Skip pairs that are already linked
if entry_b.id in entry_a.links or entry_a.id in entry_b.links:
continue
score = self.linker.compute_similarity(entry_a, entry_b)
if score < threshold:
continue
results.append({
"entry_a": {
"id": entry_a.id,
"title": entry_a.title,
"topics": entry_a.topics,
},
"entry_b": {
"id": entry_b.id,
"title": entry_b.title,
"topics": entry_b.topics,
},
"score": round(score, 4),
})
results.sort(key=lambda x: x["score"], reverse=True)
return results[:limit]
def rebuild_links(self, threshold: Optional[float] = None) -> int:
"""Recompute all links from scratch.
@@ -1308,3 +1374,36 @@ class MnemosyneArchive:
self._save()
return total_links
# ─── Discovery ──────────────────────────────────────────────
def discover(self, count=5, prefer_fading=True, topic=None):
import random
candidates = list(self._entries.values())
if topic: candidates = [e for e in candidates if topic.lower() in [t.lower() for t in e.topics]]
if not candidates: return []
scored = [(e, self._compute_vitality(e)) for e in candidates]
weights = [max(0.01, 1.0 - v) if prefer_fading else max(0.01, v) for _, v in scored]
selected = random.choices(range(len(scored)), weights=weights, k=min(count, len(scored)))
results = []
for idx in set(selected):
e, v = scored[idx]
self.touch(e.id)
results.append({"entry_id": e.id, "title": e.title, "topics": e.topics, "vitality": round(v, 4)})
return results
def resonance(self, min_similarity=0.25, max_similarity=1.0, limit=20, topic=None):
entries = list(self._entries.values())
if topic: entries = [e for e in entries if topic in e.topics]
linked = set()
for e in entries:
for l in e.links: linked.add(tuple(sorted([e.id, l])))
res = []
for i in range(len(entries)):
for j in range(i+1, len(entries)):
a, b = entries[i], entries[j]
if tuple(sorted([a.id, b.id])) in linked: continue
s = self.linker.compute_similarity(a, b)
if min_similarity <= s <= max_similarity:
res.append({"entry_a": a.id, "entry_b": b.id, "title_a": a.title, "title_b": b.title, "similarity": round(s, 4)})
res.sort(key=lambda x: x["similarity"], reverse=True)
return res[:limit]

View File

@@ -7,7 +7,8 @@ Provides: mnemosyne ingest, mnemosyne search, mnemosyne link, mnemosyne stats,
mnemosyne timeline, mnemosyne neighbors, mnemosyne path,
mnemosyne touch, mnemosyne decay, mnemosyne vitality,
mnemosyne fading, mnemosyne vibrant,
mnemosyne snapshot create|list|restore|diff
mnemosyne snapshot create|list|restore|diff,
mnemosyne resonance
"""
from __future__ import annotations
@@ -19,6 +20,7 @@ import sys
from nexus.mnemosyne.archive import MnemosyneArchive
from nexus.mnemosyne.entry import ArchiveEntry
from nexus.mnemosyne.ingest import ingest_event
from nexus.mnemosyne.snapshot import snapshot_create, snapshot_list, snapshot_restore, snapshot_diff, ingest_directory
def cmd_stats(args):
@@ -64,6 +66,13 @@ def cmd_ingest(args):
print(f"Ingested: [{entry.id[:8]}] {entry.title} ({len(entry.links)} links)")
def cmd_ingest_dir(args):
    """CLI: ingest every matching file under ``args.path`` into the archive."""
    archive = MnemosyneArchive()
    extensions = None
    if args.ext:
        extensions = [part.strip() for part in args.ext.split(",")]
    count = ingest_directory(archive, args.path, extensions=extensions)
    print(f"Ingested {count} new entries from {args.path}")
def cmd_link(args):
archive = MnemosyneArchive()
entry = archive.get(args.entry_id)
@@ -366,6 +375,24 @@ def cmd_snapshot(args):
sys.exit(1)
def cmd_resonance(args):
    """CLI: print latent (similar-but-unlinked) entry pairs with topics.

    NOTE(review): shadowed — a second ``cmd_resonance`` defined later in this
    module replaces this one at import time.
    """
    archive = MnemosyneArchive()
    pairs = archive.resonance(
        threshold=args.threshold,
        limit=args.limit,
        topic=args.topic if args.topic else None,
    )
    if not pairs:
        print("No resonant pairs found.")
        return
    for pair in pairs:
        print(f"Score: {pair['score']:.4f}")
        for side in (pair["entry_a"], pair["entry_b"]):
            topics = ", ".join(side["topics"]) if side["topics"] else "(none)"
            print(f" [{side['id'][:8]}] {side['title']}")
            print(f" Topics: {topics}")
        print()
def cmd_vibrant(args):
archive = MnemosyneArchive()
results = archive.vibrant(limit=args.limit)
@@ -394,6 +421,10 @@ def main():
i.add_argument("--content", required=True)
i.add_argument("--topics", default="", help="Comma-separated topics")
id_ = sub.add_parser("ingest-dir", help="Ingest a directory of files")
id_.add_argument("path", help="Directory to ingest")
id_.add_argument("--ext", default="", help="Comma-separated extensions (default: md,txt,json)")
l = sub.add_parser("link", help="Show linked entries")
l.add_argument("entry_id", help="Entry ID (or prefix)")
l.add_argument("-d", "--depth", type=int, default=1)
@@ -464,6 +495,11 @@ def main():
vb = sub.add_parser("vibrant", help="Show most alive entries (highest vitality)")
vb.add_argument("-n", "--limit", type=int, default=10, help="Max entries to show")
rs = sub.add_parser("resonance", help="Discover latent connections between entries")
rs.add_argument("-t", "--threshold", type=float, default=0.3, help="Minimum similarity score (default: 0.3)")
rs.add_argument("-n", "--limit", type=int, default=20, help="Max pairs to show (default: 20)")
rs.add_argument("--topic", default="", help="Restrict to entries with this topic")
sn = sub.add_parser("snapshot", help="Point-in-time backup and restore")
sn_sub = sn.add_subparsers(dest="snapshot_cmd")
sn_create = sn_sub.add_parser("create", help="Create a new snapshot")
@@ -486,6 +522,7 @@ def main():
"stats": cmd_stats,
"search": cmd_search,
"ingest": cmd_ingest,
"ingest-dir": cmd_ingest_dir,
"link": cmd_link,
"topics": cmd_topics,
"remove": cmd_remove,
@@ -506,6 +543,10 @@ def main():
"vitality": cmd_vitality,
"fading": cmd_fading,
"vibrant": cmd_vibrant,
"snapshot": lambda args: _dispatch_snapshot(args),
"discover": cmd_discover,
"resonance": cmd_resonance,
"resonance": cmd_resonance,
"snapshot": cmd_snapshot,
}
dispatch[args.command](args)
@@ -513,3 +554,16 @@ def main():
if __name__ == "__main__":
main()
def _dispatch_snapshot(args):
    """Route a ``snapshot`` sub-command to its (stub) action.

    NOTE(review): dead code as merged — the dispatch table maps ``"snapshot"``
    to ``cmd_snapshot`` later in the dict, which wins.  Also only handles
    ``create`` and ``list``; ``restore``/``diff`` fall through silently.
    """
    # Bug fix: the subparser is declared with dest="snapshot_cmd", so the
    # attribute is named snapshot_cmd — "snapshot_command" was always None.
    cmd = getattr(args, "snapshot_cmd", None)
    if cmd == "create":
        print("Snapshot created")
    elif cmd == "list":
        print("Snapshots listed")
def cmd_discover(args):
    """CLI: print serendipitous entries picked by the archive's discover()."""
    archive = MnemosyneArchive()
    hits = archive.discover(count=args.count, topic=args.topic)
    for hit in hits:
        print(f"[{hit['entry_id'][:8]}] {hit['title']}")
def cmd_resonance(args):
    """CLI: print resonant (similar-but-unlinked) entry pairs, one per line.

    NOTE(review): duplicate definition — ``cmd_resonance`` is already defined
    earlier in this module with richer output; this later ``def`` silently
    replaces it at import time.  The two also target different
    ``archive.resonance`` signatures (``threshold=`` vs ``min_similarity=``).
    Consolidate into a single command handler.
    """
    archive = MnemosyneArchive()
    # args.topic defaults to "" (argparse); resonance() treats a falsy topic as no filter.
    for r in archive.resonance(min_similarity=args.threshold, limit=args.limit, topic=args.topic): print(f"[{r['entry_a'][:8]}] {r['title_a']} <-> {r['title_b']}")

View File

@@ -0,0 +1,94 @@
"""Tests for MnemosyneArchive.resonance() — latent connection discovery."""
from __future__ import annotations
import tempfile
from pathlib import Path
import pytest
from nexus.mnemosyne.archive import MnemosyneArchive
from nexus.mnemosyne.entry import ArchiveEntry
@pytest.fixture
def archive(tmp_path):
    """Archive pre-seeded with two programming, two cooking, and one AI entry."""
    arch = MnemosyneArchive(archive_path=tmp_path / "test_archive.json")
    seed = [
        ("Python Basics", "Variables, loops, functions in Python programming", ["programming"]),
        ("JavaScript Basics", "Variables, loops, functions in JavaScript programming", ["programming"]),
        ("Cooking Pasta", "Boil water, add salt, cook pasta for 10 minutes", ["cooking"]),
        ("Italian Recipes", "Traditional Italian pasta and sauce recipes", ["cooking"]),
        ("Neural Networks", "Deep learning with backpropagation and gradient descent", ["ai"]),
    ]
    for title, content, topics in seed:
        arch.add(ArchiveEntry(title=title, content=content, topics=topics))
    return arch
def test_resonance_returns_unlinked_pairs(archive):
    """Resonance should return pairs that are semantically similar but not linked."""
    pairs = archive.resonance(min_similarity=0.1, limit=10)
    assert pairs
    expected_keys = {"entry_a", "entry_b", "title_a", "title_b", "similarity"}
    for pair in pairs:
        assert expected_keys <= set(pair)
def test_resonance_excludes_linked_pairs(archive):
    """Pairs already linked should NOT appear in resonance."""
    linked = {
        tuple(sorted((entry.id, other_id)))
        for entry in archive._entries.values()
        for other_id in entry.links
    }
    for pair in archive.resonance(min_similarity=0.0, limit=100):
        assert tuple(sorted((pair["entry_a"], pair["entry_b"]))) not in linked
def test_resonance_sorted_by_similarity(archive):
    """Results should be sorted by similarity descending."""
    scores = [r["similarity"] for r in archive.resonance(min_similarity=0.1, limit=10)]
    assert scores == sorted(scores, reverse=True)
def test_resonance_respects_limit(archive):
    """Should respect the limit parameter."""
    few = archive.resonance(min_similarity=0.0, limit=3)
    many = archive.resonance(min_similarity=0.0, limit=10)
    assert len(few) <= 3
    assert len(few) <= len(many)
def test_resonance_topic_filter(archive):
    """Topic filter should restrict to entries with that topic."""
    results = archive.resonance(min_similarity=0.0, limit=100, topic="cooking")
    for r in results:
        entry_a = archive.get(r["entry_a"])
        entry_b = archive.get(r["entry_b"])
        # resonance() filters candidates *before* pairing, so BOTH entries of
        # every surfaced pair must carry the topic — the previous `or` let
        # one-sided pairs slip through unnoticed.
        assert "cooking" in entry_a.topics
        assert "cooking" in entry_b.topics
def test_resonance_empty_archive(tmp_path):
    """Empty archive returns no results."""
    arch = MnemosyneArchive(archive_path=tmp_path / "empty_archive.json")
    assert arch.resonance() == []
def test_resonance_threshold_filter(archive):
    """Higher threshold should return fewer or equal results."""
    low = archive.resonance(min_similarity=0.1, limit=100)
    high = archive.resonance(min_similarity=0.5, limit=100)
    assert len(high) <= len(low)
    assert all(r["similarity"] >= 0.5 for r in high)