Compare commits

14 commits — feat/mnemo ... feat/mnemo
| Author | SHA1 | Date |
|---|---|---|
| | 3848b6f4ea | |
| | 3ed129ad2b | |
| | 392c73eb03 | |
| | ed5ed011c2 | |
| | 3c81c64f04 | |
| | 909a61702e | |
| | 12a5a75748 | |
| | 1273c22b15 | |
| | 038346b8a9 | |
| | b9f1602067 | |
| | c6f6f83a7c | |
| | 026e4a8cae | |
| | 75f39e4195 | |
| | 8c6255d262 | |
app.js (59 changed lines)

@@ -4,7 +4,9 @@ import { RenderPass } from 'three/addons/postprocessing/RenderPass.js';
import { UnrealBloomPass } from 'three/addons/postprocessing/UnrealBloomPass.js';
import { SMAAPass } from 'three/addons/postprocessing/SMAAPass.js';
import { SpatialMemory } from './nexus/components/spatial-memory.js';
import { MemoryBirth } from './nexus/components/memory-birth.js';
import { MemoryOptimizer } from './nexus/components/memory-optimizer.js';
import { MemoryInspect } from './nexus/components/memory-inspect.js';

// ═══════════════════════════════════════════
// NEXUS v1.1 — Portal System Update

@@ -47,6 +49,7 @@ let frameCount = 0, lastFPSTime = 0, fps = 0;
let chatOpen = true;
let memoryFeedEntries = []; // Mnemosyne: recent memory events for feed panel
let _memoryFilterOpen = false; // Mnemosyne: filter panel state
let _clickStartX = 0, _clickStartY = 0; // Mnemosyne: click-vs-drag detection
let loadProgress = 0;
let performanceTier = 'high';

@@ -708,6 +711,10 @@ async function init() {
  createWorkshopTerminal();
  createAshStorm();
  SpatialMemory.init(scene);
  MemoryBirth.init(scene);
  MemoryBirth.wrapSpatialMemory(SpatialMemory);
  SpatialMemory.setCamera(camera);
  MemoryInspect.init({ onNavigate: _navigateToMemory });
  updateLoad(90);

  loadSession();

@@ -1899,6 +1906,8 @@ function setupControls() {
    mouseDown = true;
    orbitState.lastX = e.clientX;
    orbitState.lastY = e.clientY;
    _clickStartX = e.clientX;
    _clickStartY = e.clientY;

    // Raycasting for portals
    if (!portalOverlayActive) {

@@ -1917,7 +1926,37 @@ function setupControls() {
      }
    }
  });
  document.addEventListener('mouseup', () => { mouseDown = false; });
  document.addEventListener('mouseup', (e) => {
    const wasDrag = Math.abs(e.clientX - _clickStartX) > 5 || Math.abs(e.clientY - _clickStartY) > 5;
    mouseDown = false;
    if (wasDrag || e.target !== canvas) return;

    // Crystal click detection (Mnemosyne inspect panel, issue #1227)
    if (!portalOverlayActive) {
      const mouse = new THREE.Vector2(
        (e.clientX / window.innerWidth) * 2 - 1,
        -(e.clientY / window.innerHeight) * 2 + 1
      );
      const raycaster = new THREE.Raycaster();
      raycaster.setFromCamera(mouse, camera);
      const crystalMeshes = SpatialMemory.getCrystalMeshes();
      const hits = raycaster.intersectObjects(crystalMeshes);
      if (hits.length > 0) {
        const entry = SpatialMemory.getMemoryFromMesh(hits[0].object);
        if (entry) {
          SpatialMemory.highlightMemory(entry.data.id);
          const regionDef = SpatialMemory.REGIONS[entry.region] || SpatialMemory.REGIONS.working;
          MemoryInspect.show(entry.data, regionDef);
        }
      } else {
        // Clicked empty space — close inspect panel and deselect crystal
        if (MemoryInspect.isOpen()) {
          SpatialMemory.clearHighlight();
          MemoryInspect.hide();
        }
      }
    }
  });
  document.addEventListener('mousemove', (e) => {
    if (!mouseDown) return;
    if (document.activeElement === document.getElementById('chat-input')) return;

@@ -2148,6 +2187,23 @@ function clearMemoryFeed() {
  console.info('[Mnemosyne] Memory feed cleared');
}

/**
 * Navigate to a linked memory from the inspect panel.
 * Highlights the target crystal and re-opens the panel with its data.
 * @param {string} memId
 */
function _navigateToMemory(memId) {
  const all = SpatialMemory.getAllMemories();
  const data = all.find(m => m.id === memId);
  if (!data) {
    console.warn('[MemoryInspect] Linked memory not found in scene:', memId);
    return;
  }
  SpatialMemory.highlightMemory(memId);
  const regionDef = SpatialMemory.REGIONS[data.category] || SpatialMemory.REGIONS.working;
  MemoryInspect.show(data, regionDef);
}

function handleMemoryMessage(data) {
  const action = data.action;
  const memory = data.memory;

@@ -2867,6 +2923,7 @@ function gameLoop() {
  // Project Mnemosyne - Memory Orb Animation
  if (typeof animateMemoryOrbs === 'function') {
    SpatialMemory.update(delta);
    MemoryBirth.update(delta);
    animateMemoryOrbs(delta);
  }
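A note on the coordinates built in the mouseup handler above: `Raycaster.setFromCamera` expects the pointer in normalized device coordinates, which is exactly what the two `THREE.Vector2` constructor arguments compute:

$$x_{\mathrm{ndc}} = \frac{2\,\mathit{clientX}}{\mathit{innerWidth}} - 1, \qquad y_{\mathrm{ndc}} = 1 - \frac{2\,\mathit{clientY}}{\mathit{innerHeight}}$$

Both values land in [−1, 1]; the y term is flipped because screen pixels grow downward while NDC y grows upward.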
index.html

@@ -473,6 +473,9 @@
</div>

<!-- Memory Inspect Panel (Mnemosyne, issue #1227) -->
<div id="memory-inspect-panel" class="memory-inspect-panel" style="display:none;" aria-label="Memory Inspect Panel">
</div>

<script>
// ─── MNEMOSYNE: Memory Filter Panel ───────────────────
nexus/components/memory-birth.js (new file, 263 lines)

@@ -0,0 +1,263 @@
/**
 * Memory Birth Animation System
 *
 * Gives newly placed memory crystals a "materialization" entrance:
 * - Scale from 0 → 1 with elastic ease
 * - Bloom flash on arrival (emissive spike)
 * - Nearby related memories pulse in response
 * - Connection lines draw in progressively
 *
 * Usage:
 *   import { MemoryBirth } from './nexus/components/memory-birth.js';
 *   MemoryBirth.init(scene);
 *   // After placing a crystal via SpatialMemory.placeMemory():
 *   MemoryBirth.triggerBirth(crystalMesh, spatialMemory);
 *   // In your render loop:
 *   MemoryBirth.update(delta);
 */

const MemoryBirth = (() => {
  // ─── CONFIG ────────────────────────────────────────
  const BIRTH_DURATION = 1.8;       // seconds for full materialization
  const BLOOM_PEAK = 0.3;           // when the bloom flash peaks (fraction of duration)
  const BLOOM_INTENSITY = 4.0;      // emissive spike at peak
  const NEIGHBOR_PULSE_RADIUS = 8;  // units — memories in this range pulse
  const NEIGHBOR_PULSE_INTENSITY = 2.5;
  const NEIGHBOR_PULSE_DURATION = 0.8;
  const LINE_DRAW_DURATION = 1.2;   // seconds for connection lines to grow in

  let _scene = null;
  let _activeBirths = [];      // { mesh, startTime, duration, originPos }
  let _activePulses = [];      // { mesh, startTime, duration, origEmissive, origIntensity }
  let _activeLineGrowths = []; // { line, startTime, duration, totalPoints }
  let _initialized = false;

  // ─── ELASTIC EASE-OUT ─────────────────────────────
  function elasticOut(t) {
    if (t <= 0) return 0;
    if (t >= 1) return 1;
    const c4 = (2 * Math.PI) / 3;
    return Math.pow(2, -10 * t) * Math.sin((t * 10 - 0.75) * c4) + 1;
  }

  // ─── SMOOTH STEP ──────────────────────────────────
  function smoothstep(edge0, edge1, x) {
    const t = Math.max(0, Math.min(1, (x - edge0) / (edge1 - edge0)));
    return t * t * (3 - 2 * t);
  }

  // ─── INIT ─────────────────────────────────────────
  function init(scene) {
    _scene = scene;
    _initialized = true;
    console.info('[MemoryBirth] Initialized');
  }

  // ─── TRIGGER BIRTH ────────────────────────────────
  function triggerBirth(mesh, spatialMemory) {
    if (!_initialized || !mesh) return;

    // Start at zero scale
    mesh.scale.setScalar(0.001);

    // Store original material values for bloom
    if (mesh.material) {
      mesh.userData._birthOrigEmissive = mesh.material.emissiveIntensity;
      mesh.userData._birthOrigOpacity = mesh.material.opacity;
    }

    _activeBirths.push({
      mesh,
      startTime: Date.now() / 1000,
      duration: BIRTH_DURATION,
      spatialMemory,
      originPos: mesh.position.clone()
    });

    // Trigger neighbor pulses for memories in the same region
    _triggerNeighborPulses(mesh, spatialMemory);

    // Schedule connection line growth
    _triggerLineGrowth(mesh, spatialMemory);
  }

  // ─── NEIGHBOR PULSE ───────────────────────────────
  function _triggerNeighborPulses(mesh, spatialMemory) {
    if (!spatialMemory || !mesh.position) return;

    const allMems = spatialMemory.getAllMemories ? spatialMemory.getAllMemories() : [];
    const pos = mesh.position;
    const sourceId = mesh.userData.memId;

    allMems.forEach(mem => {
      if (mem.id === sourceId) return;
      if (!mem.position) return;

      const dx = mem.position[0] - pos.x;
      const dy = (mem.position[1] + 1.5) - pos.y;
      const dz = mem.position[2] - pos.z;
      const dist = Math.sqrt(dx * dx + dy * dy + dz * dz);

      if (dist < NEIGHBOR_PULSE_RADIUS) {
        // Find the mesh for this memory
        const neighborMesh = _findMeshById(mem.id, spatialMemory);
        if (neighborMesh && neighborMesh.material) {
          _activePulses.push({
            mesh: neighborMesh,
            startTime: Date.now() / 1000,
            duration: NEIGHBOR_PULSE_DURATION,
            origEmissive: neighborMesh.material.emissiveIntensity,
            intensity: NEIGHBOR_PULSE_INTENSITY * (1 - dist / NEIGHBOR_PULSE_RADIUS)
          });
        }
      }
    });
  }

  function _findMeshById(memId, spatialMemory) {
    // Access the internal memory objects through crystal meshes
    const meshes = spatialMemory.getCrystalMeshes ? spatialMemory.getCrystalMeshes() : [];
    return meshes.find(m => m.userData && m.userData.memId === memId);
  }

  // ─── LINE GROWTH ──────────────────────────────────
  function _triggerLineGrowth(mesh, spatialMemory) {
    if (!_scene) return;

    // Find connection lines that originate from this memory.
    // Connection lines are stored as children of the scene or in a group.
    _scene.children.forEach(child => {
      if (child.isLine && child.userData) {
        // Check if this line connects to our new memory
        if (child.userData.fromId === mesh.userData.memId ||
            child.userData.toId === mesh.userData.memId) {
          _activeLineGrowths.push({
            line: child,
            startTime: Date.now() / 1000,
            duration: LINE_DRAW_DURATION
          });
        }
      }
    });
  }

  // ─── UPDATE (call every frame) ────────────────────
  function update(delta) {
    const now = Date.now() / 1000;

    // ── Process births ──
    for (let i = _activeBirths.length - 1; i >= 0; i--) {
      const birth = _activeBirths[i];
      const elapsed = now - birth.startTime;
      const t = Math.min(1, elapsed / birth.duration);

      if (t >= 1) {
        // Birth complete — ensure final state
        birth.mesh.scale.setScalar(1);
        if (birth.mesh.material) {
          birth.mesh.material.emissiveIntensity = birth.mesh.userData._birthOrigEmissive || 1.5;
          birth.mesh.material.opacity = birth.mesh.userData._birthOrigOpacity || 0.9;
        }
        _activeBirths.splice(i, 1);
        continue;
      }

      // Scale animation with elastic ease
      const scale = elasticOut(t);
      birth.mesh.scale.setScalar(Math.max(0.001, scale));

      // Bloom flash — emissive intensity spikes at BLOOM_PEAK then fades
      if (birth.mesh.material) {
        const origEI = birth.mesh.userData._birthOrigEmissive || 1.5;
        const bloomT = smoothstep(0, BLOOM_PEAK, t) * (1 - smoothstep(BLOOM_PEAK, 1, t));
        birth.mesh.material.emissiveIntensity = origEI + bloomT * BLOOM_INTENSITY;

        // Opacity fades in
        const origOp = birth.mesh.userData._birthOrigOpacity || 0.9;
        birth.mesh.material.opacity = origOp * smoothstep(0, 0.3, t);
      }

      // Gentle upward float during birth (crystals are placed 1.5 above ground)
      birth.mesh.position.y = birth.originPos.y + (1 - scale) * 0.5;
    }

    // ── Process neighbor pulses ──
    for (let i = _activePulses.length - 1; i >= 0; i--) {
      const pulse = _activePulses[i];
      const elapsed = now - pulse.startTime;
      const t = Math.min(1, elapsed / pulse.duration);

      if (t >= 1) {
        // Restore original
        if (pulse.mesh.material) {
          pulse.mesh.material.emissiveIntensity = pulse.origEmissive;
        }
        _activePulses.splice(i, 1);
        continue;
      }

      // Pulse curve: quick rise, slow decay
      const pulseVal = Math.sin(t * Math.PI) * pulse.intensity;
      if (pulse.mesh.material) {
        pulse.mesh.material.emissiveIntensity = pulse.origEmissive + pulseVal;
      }
    }

    // ── Process line growths ──
    for (let i = _activeLineGrowths.length - 1; i >= 0; i--) {
      const lg = _activeLineGrowths[i];
      const elapsed = now - lg.startTime;
      const t = Math.min(1, elapsed / lg.duration);

      if (t >= 1) {
        // Ensure full visibility
        if (lg.line.material) {
          lg.line.material.opacity = lg.line.material.userData?._origOpacity || 0.6;
        }
        _activeLineGrowths.splice(i, 1);
        continue;
      }

      // Fade in the line
      if (lg.line.material) {
        const origOp = lg.line.material.userData?._origOpacity || 0.6;
        lg.line.material.opacity = origOp * smoothstep(0, 1, t);
      }
    }
  }

  // ─── BIRTH COUNT (for UI/status) ─────────────────
  function getActiveBirthCount() {
    return _activeBirths.length;
  }

  // ─── WRAP SPATIAL MEMORY ──────────────────────────
  /**
   * Wraps SpatialMemory.placeMemory() so every new crystal
   * automatically gets a birth animation.
   * Patches the passed object in place and returns it.
   */
  function wrapSpatialMemory(spatialMemory) {
    const original = spatialMemory.placeMemory.bind(spatialMemory);
    spatialMemory.placeMemory = function(mem) {
      const crystal = original(mem);
      if (crystal) {
        // Small delay to let THREE.js settle the object
        requestAnimationFrame(() => triggerBirth(crystal, spatialMemory));
      }
      return crystal;
    };
    console.info('[MemoryBirth] SpatialMemory.placeMemory wrapped — births will animate');
    return spatialMemory;
  }

  return {
    init,
    triggerBirth,
    update,
    getActiveBirthCount,
    wrapSpatialMemory
  };
})();

export { MemoryBirth };
nexus/components/memory-inspect.js (new file, 180 lines)

@@ -0,0 +1,180 @@
// ═══════════════════════════════════════════════════════════
// MNEMOSYNE — Memory Inspect Panel (issue #1227)
// ═══════════════════════════════════════════════════════════
//
// Side-panel detail view for memory crystals.
// Opens when a crystal is clicked; auto-closes on empty-space click.
//
// Usage from app.js:
//   MemoryInspect.init({ onNavigate: fn });
//   MemoryInspect.show(memData, regionDef);
//   MemoryInspect.hide();
//   MemoryInspect.isOpen();
// ═══════════════════════════════════════════════════════════

const MemoryInspect = (() => {
  let _panel = null;
  let _onNavigate = null; // callback(memId) — navigate to a linked memory

  // ─── INIT ────────────────────────────────────────────────
  function init(opts = {}) {
    _onNavigate = opts.onNavigate || null;
    _panel = document.getElementById('memory-inspect-panel');
    if (!_panel) {
      console.warn('[MemoryInspect] Panel element #memory-inspect-panel not found in DOM');
    }
  }

  // ─── SHOW ────────────────────────────────────────────────
  function show(data, regionDef) {
    if (!_panel) return;

    const region = regionDef || {};
    const colorHex = region.color
      ? '#' + region.color.toString(16).padStart(6, '0')
      : '#4af0c0';
    const strength = data.strength != null ? data.strength : 0.7;
    const vitality = Math.round(Math.max(0, Math.min(1, strength)) * 100);

    let vitalityColor = '#4af0c0';
    if (vitality < 30) vitalityColor = '#ff4466';
    else if (vitality < 60) vitalityColor = '#ffaa22';

    const ts = data.timestamp ? new Date(data.timestamp) : null;
    const created = ts && !isNaN(ts) ? ts.toLocaleString() : '—';

    // Linked memories
    let linksHtml = '';
    if (data.connections && data.connections.length > 0) {
      linksHtml = data.connections
        .map(id => `<button class="mi-link-btn" data-memid="${_esc(id)}">${_esc(id)}</button>`)
        .join('');
    } else {
      linksHtml = '<span class="mi-empty">No linked memories</span>';
    }

    _panel.innerHTML = `
      <div class="mi-header" style="border-left:3px solid ${colorHex}">
        <span class="mi-region-glyph">${region.glyph || '\u25C8'}</span>
        <div class="mi-header-text">
          <div class="mi-id" title="${_esc(data.id || '')}">${_esc(_truncate(data.id || '\u2014', 28))}</div>
          <div class="mi-region" style="color:${colorHex}">${_esc(region.label || data.category || '\u2014')}</div>
        </div>
        <button class="mi-close" id="mi-close-btn" aria-label="Close inspect panel">\u2715</button>
      </div>
      <div class="mi-body">
        <div class="mi-section">
          <div class="mi-section-label">CONTENT</div>
          <div class="mi-content">${_esc(data.content || '(empty)')}</div>
        </div>
        <div class="mi-section">
          <div class="mi-section-label">VITALITY</div>
          <div class="mi-vitality-row">
            <div class="mi-vitality-bar-track">
              <div class="mi-vitality-bar" style="width:${vitality}%;background:${vitalityColor}"></div>
            </div>
            <span class="mi-vitality-pct" style="color:${vitalityColor}">${vitality}%</span>
          </div>
        </div>
        <div class="mi-section">
          <div class="mi-section-label">LINKED MEMORIES</div>
          <div class="mi-links" id="mi-links">${linksHtml}</div>
        </div>
        <div class="mi-section">
          <div class="mi-section-label">META</div>
          <div class="mi-meta-row">
            <span class="mi-meta-key">Source</span>
            <span class="mi-meta-val">${_esc(data.source || '\u2014')}</span>
          </div>
          <div class="mi-meta-row">
            <span class="mi-meta-key">Created</span>
            <span class="mi-meta-val">${created}</span>
          </div>
        </div>
        <div class="mi-actions">
          <button class="mi-action-btn" id="mi-copy-btn">\u2398 Copy</button>
        </div>
      </div>
    `;

    // Wire close button
    const closeBtn = _panel.querySelector('#mi-close-btn');
    if (closeBtn) closeBtn.addEventListener('click', hide);

    // Wire copy button
    const copyBtn = _panel.querySelector('#mi-copy-btn');
    if (copyBtn) {
      copyBtn.addEventListener('click', () => {
        const text = data.content || '';
        if (navigator.clipboard) {
          navigator.clipboard.writeText(text).then(() => {
            copyBtn.textContent = '\u2713 Copied';
            setTimeout(() => { copyBtn.textContent = '\u2398 Copy'; }, 1500);
          }).catch(() => _fallbackCopy(text));
        } else {
          _fallbackCopy(text);
        }
      });
    }

    // Wire link navigation
    const linksContainer = _panel.querySelector('#mi-links');
    if (linksContainer) {
      linksContainer.addEventListener('click', (e) => {
        const btn = e.target.closest('.mi-link-btn');
        if (btn && _onNavigate) _onNavigate(btn.dataset.memid);
      });
    }

    _panel.style.display = 'flex';
    // Trigger CSS animation
    requestAnimationFrame(() => _panel.classList.add('mi-visible'));
  }

  // ─── HIDE ─────────────────────────────────────────────────
  function hide() {
    if (!_panel) return;
    _panel.classList.remove('mi-visible');
    // Wait for CSS transition before hiding
    const onEnd = () => {
      _panel.style.display = 'none';
      _panel.removeEventListener('transitionend', onEnd);
    };
    _panel.addEventListener('transitionend', onEnd);
    // Safety fallback if transition doesn't fire
    setTimeout(() => { if (_panel) _panel.style.display = 'none'; }, 350);
  }

  // ─── QUERY ────────────────────────────────────────────────
  function isOpen() {
    return _panel != null && _panel.style.display !== 'none';
  }

  // ─── HELPERS ──────────────────────────────────────────────
  function _esc(str) {
    return String(str)
      .replace(/&/g, '&amp;')
      .replace(/</g, '&lt;')
      .replace(/>/g, '&gt;')
      .replace(/"/g, '&quot;');
  }

  function _truncate(str, n) {
    return str.length > n ? str.slice(0, n - 1) + '\u2026' : str;
  }

  function _fallbackCopy(text) {
    const ta = document.createElement('textarea');
    ta.value = text;
    ta.style.position = 'fixed';
    ta.style.left = '-9999px';
    document.body.appendChild(ta);
    ta.select();
    document.execCommand('copy');
    document.body.removeChild(ta);
  }

  return { init, show, hide, isOpen };
})();

export { MemoryInspect };
nexus/components/spatial-memory.js

@@ -866,7 +866,7 @@ const SpatialMemory = (() => {
    getCrystalMeshes, getMemoryFromMesh, highlightMemory, clearHighlight, getSelectedId,
    exportIndex, importIndex, searchNearby, REGIONS,
    saveToStorage, loadFromStorage, clearStorage,
    runGravityLayout
    runGravityLayout, setCamera
  };
})();
nexus/mnemosyne/__init__.py (new file, 24 lines)

@@ -0,0 +1,24 @@
"""nexus.mnemosyne — The Living Holographic Archive.

Phase 1: Foundation — core archive, entry model, holographic linker,
ingestion pipeline, and CLI.

Builds on MemPalace vector memory to create interconnected meaning:
entries auto-reference related entries via semantic similarity,
forming a living archive that surfaces relevant context autonomously.
"""

from __future__ import annotations

from nexus.mnemosyne.archive import MnemosyneArchive
from nexus.mnemosyne.entry import ArchiveEntry
from nexus.mnemosyne.linker import HolographicLinker
from nexus.mnemosyne.ingest import ingest_from_mempalace, ingest_event

__all__ = [
    "MnemosyneArchive",
    "ArchiveEntry",
    "HolographicLinker",
    "ingest_from_mempalace",
    "ingest_event",
]
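A minimal end-to-end sketch of the Phase 1 API exported above. The archive path is illustrative (passed explicitly so the demo does not touch the default location under ~/.hermes), and the entry contents are made up:

    from pathlib import Path

    from nexus.mnemosyne import MnemosyneArchive, ingest_event

    # Illustrative path; by default the archive lives at ~/.hermes/mnemosyne/archive.json
    archive = MnemosyneArchive(archive_path=Path("/tmp/mnemo-demo/archive.json"))

    a = ingest_event(archive, title="Python automation",
                     content="Building automation tools in Python", topics=["python"])
    b = ingest_event(archive, title="Python scripting",
                     content="Writing automation scripts using Python", topics=["python"])

    # The linker auto-connects entries with enough token overlap
    print(archive.search("automation")[0].title)
    print([e.title for e in archive.get_linked(a.id, depth=1)])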
nexus/mnemosyne/archive.py (new file, 487 lines)

@@ -0,0 +1,487 @@
"""MnemosyneArchive — core archive class.

The living holographic archive. Stores entries, maintains links,
and provides query interfaces for retrieving connected knowledge.
"""

from __future__ import annotations

import json
from pathlib import Path
from typing import Optional

from nexus.mnemosyne.entry import ArchiveEntry
from nexus.mnemosyne.linker import HolographicLinker

_EXPORT_VERSION = "1"


class MnemosyneArchive:
    """The holographic archive — stores and links entries.

    Phase 1 uses JSON file storage. Phase 2 will integrate with
    MemPalace (ChromaDB) for vector-semantic search.
    """

    def __init__(self, archive_path: Optional[Path] = None):
        self.path = archive_path or Path.home() / ".hermes" / "mnemosyne" / "archive.json"
        self.path.parent.mkdir(parents=True, exist_ok=True)
        self.linker = HolographicLinker()
        self._entries: dict[str, ArchiveEntry] = {}
        self._load()

    def _load(self):
        if self.path.exists():
            try:
                with open(self.path) as f:
                    data = json.load(f)
                for entry_data in data.get("entries", []):
                    entry = ArchiveEntry.from_dict(entry_data)
                    self._entries[entry.id] = entry
            except (json.JSONDecodeError, KeyError):
                pass  # Start fresh on corrupt data

    def _save(self):
        data = {
            "entries": [e.to_dict() for e in self._entries.values()],
            "count": len(self._entries),
        }
        with open(self.path, "w") as f:
            json.dump(data, f, indent=2)

    def add(self, entry: ArchiveEntry, auto_link: bool = True) -> ArchiveEntry:
        """Add an entry to the archive. Auto-links to related entries."""
        self._entries[entry.id] = entry
        if auto_link:
            self.linker.apply_links(entry, list(self._entries.values()))
        self._save()
        return entry

    def get(self, entry_id: str) -> Optional[ArchiveEntry]:
        return self._entries.get(entry_id)

    def search(self, query: str, limit: int = 10) -> list[ArchiveEntry]:
        """Simple keyword search across titles and content."""
        query_tokens = set(query.lower().split())
        scored = []
        for entry in self._entries.values():
            text = f"{entry.title} {entry.content} {' '.join(entry.topics)}".lower()
            hits = sum(1 for t in query_tokens if t in text)
            if hits > 0:
                scored.append((hits, entry))
        scored.sort(key=lambda x: x[0], reverse=True)
        return [e for _, e in scored[:limit]]

    def semantic_search(self, query: str, limit: int = 10, threshold: float = 0.05) -> list[ArchiveEntry]:
        """Semantic search using holographic linker similarity.

        Scores each entry by Jaccard similarity between query tokens and entry
        tokens, then boosts entries with more inbound links (more "holographic").
        Falls back to keyword search if no entries meet the similarity threshold.

        Args:
            query: Natural language query string.
            limit: Maximum number of results to return.
            threshold: Minimum Jaccard similarity to be considered a semantic match.

        Returns:
            List of ArchiveEntry sorted by combined relevance score, descending.
        """
        query_tokens = HolographicLinker._tokenize(query)
        if not query_tokens:
            return []

        # Count inbound links for each entry (how many entries link TO this one)
        inbound: dict[str, int] = {eid: 0 for eid in self._entries}
        for entry in self._entries.values():
            for linked_id in entry.links:
                if linked_id in inbound:
                    inbound[linked_id] += 1

        max_inbound = max(inbound.values(), default=1) or 1

        scored = []
        for entry in self._entries.values():
            entry_tokens = HolographicLinker._tokenize(f"{entry.title} {entry.content} {' '.join(entry.topics)}")
            if not entry_tokens:
                continue
            intersection = query_tokens & entry_tokens
            union = query_tokens | entry_tokens
            jaccard = len(intersection) / len(union)
            if jaccard >= threshold:
                link_boost = inbound[entry.id] / max_inbound * 0.2  # up to 20% boost
                scored.append((jaccard + link_boost, entry))

        if scored:
            scored.sort(key=lambda x: x[0], reverse=True)
            return [e for _, e in scored[:limit]]

        # Graceful fallback to keyword search
        return self.search(query, limit=limit)

    def get_linked(self, entry_id: str, depth: int = 1) -> list[ArchiveEntry]:
        """Get entries linked to a given entry, up to specified depth."""
        visited = set()
        frontier = {entry_id}
        result = []
        for _ in range(depth):
            next_frontier = set()
            for eid in frontier:
                if eid in visited:
                    continue
                visited.add(eid)
                entry = self._entries.get(eid)
                if entry:
                    for linked_id in entry.links:
                        if linked_id not in visited:
                            linked = self._entries.get(linked_id)
                            if linked:
                                result.append(linked)
                                next_frontier.add(linked_id)
            frontier = next_frontier
        return result

    def by_topic(self, topic: str) -> list[ArchiveEntry]:
        """Get all entries tagged with a topic."""
        topic_lower = topic.lower()
        return [e for e in self._entries.values() if topic_lower in [t.lower() for t in e.topics]]

    def remove(self, entry_id: str) -> bool:
        """Remove an entry and clean up all bidirectional links.

        Returns True if the entry existed and was removed, False otherwise.
        """
        if entry_id not in self._entries:
            return False
        # Remove back-links from all other entries
        for other in self._entries.values():
            if entry_id in other.links:
                other.links.remove(entry_id)
        del self._entries[entry_id]
        self._save()
        return True

    def export(
        self,
        query: Optional[str] = None,
        topics: Optional[list[str]] = None,
    ) -> dict:
        """Export a filtered subset of the archive.

        Args:
            query: keyword filter applied to title + content (case-insensitive)
            topics: list of topic tags; entries must match at least one

        Returns a JSON-serialisable dict with an ``entries`` list and metadata.
        """
        candidates = list(self._entries.values())

        if topics:
            lower_topics = {t.lower() for t in topics}
            candidates = [
                e for e in candidates
                if any(t.lower() in lower_topics for t in e.topics)
            ]

        if query:
            query_tokens = set(query.lower().split())
            candidates = [
                e for e in candidates
                if any(
                    token in f"{e.title} {e.content} {' '.join(e.topics)}".lower()
                    for token in query_tokens
                )
            ]

        return {
            "version": _EXPORT_VERSION,
            "filters": {"query": query, "topics": topics},
            "count": len(candidates),
            "entries": [e.to_dict() for e in candidates],
        }

    def topic_counts(self) -> dict[str, int]:
        """Return a dict mapping topic name → entry count, sorted by count desc."""
        counts: dict[str, int] = {}
        for entry in self._entries.values():
            for topic in entry.topics:
                counts[topic] = counts.get(topic, 0) + 1
        return dict(sorted(counts.items(), key=lambda x: x[1], reverse=True))

    @property
    def count(self) -> int:
        return len(self._entries)

    def stats(self) -> dict:
        entries = list(self._entries.values())
        total_links = sum(len(e.links) for e in entries)
        topics: set[str] = set()
        for e in entries:
            topics.update(e.topics)

        # Orphans: entries with no links at all
        orphans = sum(1 for e in entries if len(e.links) == 0)

        # Link density: average links per entry (0 when empty)
        n = len(entries)
        link_density = round(total_links / n, 4) if n else 0.0

        # Age distribution
        timestamps = sorted(e.created_at for e in entries)
        oldest_entry = timestamps[0] if timestamps else None
        newest_entry = timestamps[-1] if timestamps else None

        return {
            "entries": n,
            "total_links": total_links,
            "unique_topics": len(topics),
            "topics": sorted(topics),
            "orphans": orphans,
            "link_density": link_density,
            "oldest_entry": oldest_entry,
            "newest_entry": newest_entry,
        }

    def _build_adjacency(self) -> dict[str, set[str]]:
        """Build adjacency dict from entry links. Only includes valid references."""
        adj: dict[str, set[str]] = {eid: set() for eid in self._entries}
        for eid, entry in self._entries.items():
            for linked_id in entry.links:
                if linked_id in self._entries and linked_id != eid:
                    adj[eid].add(linked_id)
                    adj[linked_id].add(eid)
        return adj

    def graph_clusters(self, min_size: int = 1) -> list[dict]:
        """Find connected component clusters in the holographic graph.

        Uses BFS to discover groups of entries that are reachable from each
        other through their links. Returns clusters sorted by size descending.

        Args:
            min_size: Minimum cluster size to include (filters out isolated entries).

        Returns:
            List of dicts with keys: cluster_id, size, entries, topics, density
        """
        adj = self._build_adjacency()
        visited: set[str] = set()
        clusters: list[dict] = []
        cluster_id = 0

        for eid in self._entries:
            if eid in visited:
                continue
            # BFS from this entry
            component: list[str] = []
            queue = [eid]
            while queue:
                current = queue.pop(0)
                if current in visited:
                    continue
                visited.add(current)
                component.append(current)
                for neighbor in adj.get(current, set()):
                    if neighbor not in visited:
                        queue.append(neighbor)

            # Single-entry clusters are orphans
            if len(component) < min_size:
                continue

            # Collect topics from cluster entries
            cluster_topics: dict[str, int] = {}
            internal_edges = 0
            for cid in component:
                entry = self._entries[cid]
                for t in entry.topics:
                    cluster_topics[t] = cluster_topics.get(t, 0) + 1
                internal_edges += len(adj.get(cid, set()))
            internal_edges //= 2  # undirected, counted twice

            # Density: actual edges / possible edges
            n = len(component)
            max_edges = n * (n - 1) // 2
            density = round(internal_edges / max_edges, 4) if max_edges > 0 else 0.0

            # Top topics by frequency
            top_topics = sorted(cluster_topics.items(), key=lambda x: x[1], reverse=True)[:5]

            clusters.append({
                "cluster_id": cluster_id,
                "size": n,
                "entries": component,
                "top_topics": [t for t, _ in top_topics],
                "internal_edges": internal_edges,
                "density": density,
            })
            cluster_id += 1

        clusters.sort(key=lambda c: c["size"], reverse=True)
        return clusters

    def hub_entries(self, limit: int = 10) -> list[dict]:
        """Find the most connected entries (highest degree centrality).

        These are the "hubs" of the holographic graph — entries that bridge
        many topics and attract many links.

        Args:
            limit: Maximum number of hubs to return.

        Returns:
            List of dicts with keys: entry, degree, inbound, outbound, topics
        """
        adj = self._build_adjacency()
        inbound: dict[str, int] = {eid: 0 for eid in self._entries}

        for entry in self._entries.values():
            for lid in entry.links:
                if lid in inbound:
                    inbound[lid] += 1

        hubs = []
        for eid, entry in self._entries.items():
            degree = len(adj.get(eid, set()))
            if degree == 0:
                continue
            hubs.append({
                "entry": entry,
                "degree": degree,
                "inbound": inbound.get(eid, 0),
                "outbound": len(entry.links),
                "topics": entry.topics,
            })

        hubs.sort(key=lambda h: h["degree"], reverse=True)
        return hubs[:limit]

    def bridge_entries(self) -> list[dict]:
        """Find articulation points — entries whose removal would split a cluster.

        These are "bridge" entries in the holographic graph. Removing them
        disconnects members that were previously reachable through the bridge.
        Uses Tarjan's algorithm for finding articulation points.

        Returns:
            List of dicts with keys: entry, cluster_size, bridges_between
        """
        adj = self._build_adjacency()

        # Find clusters first
        clusters = self.graph_clusters(min_size=3)
        if not clusters:
            return []

        # For each cluster, run Tarjan's algorithm
        bridges: list[dict] = []
        for cluster in clusters:
            members = set(cluster["entries"])
            if len(members) < 3:
                continue

            # Build subgraph adjacency
            sub_adj = {eid: adj[eid] & members for eid in members}

            # Tarjan's DFS for articulation points
            discovery: dict[str, int] = {}
            low: dict[str, int] = {}
            parent: dict[str, Optional[str]] = {}
            ap: set[str] = set()
            timer = [0]

            def dfs(u: str):
                children = 0
                discovery[u] = low[u] = timer[0]
                timer[0] += 1
                for v in sub_adj[u]:
                    if v not in discovery:
                        children += 1
                        parent[v] = u
                        dfs(v)
                        low[u] = min(low[u], low[v])

                        # u is AP if: root with 2+ children, or non-root with low[v] >= disc[u]
                        if parent.get(u) is None and children > 1:
                            ap.add(u)
                        if parent.get(u) is not None and low[v] >= discovery[u]:
                            ap.add(u)
                    elif v != parent.get(u):
                        low[u] = min(low[u], discovery[v])

            for eid in members:
                if eid not in discovery:
                    parent[eid] = None
                    dfs(eid)

            # For each articulation point, estimate what it bridges
            for ap_id in ap:
                ap_entry = self._entries[ap_id]
                # Remove it temporarily and count resulting components
                temp_adj = {k: v.copy() for k, v in sub_adj.items()}
                del temp_adj[ap_id]
                for k in temp_adj:
                    temp_adj[k].discard(ap_id)

                # BFS count components after removal
                temp_visited: set[str] = set()
                component_count = 0
                for mid in members:
                    if mid == ap_id or mid in temp_visited:
                        continue
                    component_count += 1
                    queue = [mid]
                    while queue:
                        cur = queue.pop(0)
                        if cur in temp_visited:
                            continue
                        temp_visited.add(cur)
                        for nb in temp_adj.get(cur, set()):
                            if nb not in temp_visited:
                                queue.append(nb)

                if component_count > 1:
                    bridges.append({
                        "entry": ap_entry,
                        "cluster_size": cluster["size"],
                        "components_after_removal": component_count,
                        "topics": ap_entry.topics,
                    })

        bridges.sort(key=lambda b: b["components_after_removal"], reverse=True)
        return bridges

    def rebuild_links(self, threshold: Optional[float] = None) -> int:
        """Recompute all links from scratch.

        Clears existing links and re-applies the holographic linker to every
        entry pair. Useful after bulk ingestion or threshold changes.

        Args:
            threshold: Override the linker's default similarity threshold.

        Returns:
            Total number of links created.
        """
        if threshold is not None:
            old_threshold = self.linker.threshold
            self.linker.threshold = threshold

        # Clear all links
        for entry in self._entries.values():
            entry.links = []

        entries = list(self._entries.values())
        total_links = 0

        # Re-link each entry against all others
        for entry in entries:
            candidates = [e for e in entries if e.id != entry.id]
            new_links = self.linker.apply_links(entry, candidates)
            total_links += new_links

        if threshold is not None:
            self.linker.threshold = old_threshold

        self._save()
        return total_links
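To make the scoring in `semantic_search` concrete, here is the same Jaccard-plus-link-boost formula worked by hand. The token sets and link counts are illustrative; the arithmetic mirrors the method above:

    # Query tokens vs. one entry's tokens (both already filtered to len > 2)
    query_tokens = {"python", "automation"}
    entry_tokens = {"python", "automation", "scripts", "tools"}

    jaccard = len(query_tokens & entry_tokens) / len(query_tokens | entry_tokens)
    # = 2 / 4 = 0.5, which clears the default threshold of 0.05

    # If this entry has 3 inbound links and the best-connected entry has 6,
    # the boost adds inbound / max_inbound * 0.2 (up to 20%):
    score = jaccard + (3 / 6) * 0.2  # 0.5 + 0.1 = 0.6
    print(score)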
nexus/mnemosyne/cli.py (new file, 209 lines)

@@ -0,0 +1,209 @@
"""CLI interface for Mnemosyne.

Provides: mnemosyne ingest, mnemosyne search, mnemosyne link, mnemosyne stats,
mnemosyne topics, mnemosyne remove, mnemosyne export,
mnemosyne clusters, mnemosyne hubs, mnemosyne bridges, mnemosyne rebuild
"""

from __future__ import annotations

import argparse
import json
import sys

from nexus.mnemosyne.archive import MnemosyneArchive
from nexus.mnemosyne.entry import ArchiveEntry
from nexus.mnemosyne.ingest import ingest_event


def cmd_stats(args):
    archive = MnemosyneArchive()
    stats = archive.stats()
    print(json.dumps(stats, indent=2))


def cmd_search(args):
    archive = MnemosyneArchive()
    if getattr(args, "semantic", False):
        results = archive.semantic_search(args.query, limit=args.limit)
    else:
        results = archive.search(args.query, limit=args.limit)
    if not results:
        print("No results found.")
        return
    for entry in results:
        linked = len(entry.links)
        print(f"[{entry.id[:8]}] {entry.title}")
        print(f"  Source: {entry.source} | Topics: {', '.join(entry.topics)} | Links: {linked}")
        print(f"  {entry.content[:120]}...")
        print()


def cmd_ingest(args):
    archive = MnemosyneArchive()
    entry = ingest_event(
        archive,
        title=args.title,
        content=args.content,
        topics=args.topics.split(",") if args.topics else [],
    )
    print(f"Ingested: [{entry.id[:8]}] {entry.title} ({len(entry.links)} links)")


def cmd_link(args):
    archive = MnemosyneArchive()
    entry = archive.get(args.entry_id)
    if not entry:
        print(f"Entry not found: {args.entry_id}")
        sys.exit(1)
    linked = archive.get_linked(entry.id, depth=args.depth)
    if not linked:
        print("No linked entries found.")
        return
    for e in linked:
        print(f"  [{e.id[:8]}] {e.title} (source: {e.source})")


def cmd_topics(args):
    archive = MnemosyneArchive()
    counts = archive.topic_counts()
    if not counts:
        print("No topics found.")
        return
    for topic, count in counts.items():
        print(f"  {topic}: {count}")


def cmd_remove(args):
    archive = MnemosyneArchive()
    removed = archive.remove(args.entry_id)
    if removed:
        print(f"Removed entry: {args.entry_id}")
    else:
        print(f"Entry not found: {args.entry_id}")
        sys.exit(1)


def cmd_export(args):
    archive = MnemosyneArchive()
    topics = [t.strip() for t in args.topics.split(",")] if args.topics else None
    data = archive.export(query=args.query or None, topics=topics)
    print(json.dumps(data, indent=2))


def cmd_clusters(args):
    archive = MnemosyneArchive()
    clusters = archive.graph_clusters(min_size=args.min_size)
    if not clusters:
        print("No clusters found.")
        return
    for c in clusters:
        print(f"Cluster {c['cluster_id']}: {c['size']} entries, density={c['density']}")
        print(f"  Topics: {', '.join(c['top_topics']) if c['top_topics'] else '(none)'}")
        if args.verbose:
            for eid in c["entries"]:
                entry = archive.get(eid)
                if entry:
                    print(f"    [{eid[:8]}] {entry.title}")
        print()


def cmd_hubs(args):
    archive = MnemosyneArchive()
    hubs = archive.hub_entries(limit=args.limit)
    if not hubs:
        print("No hubs found.")
        return
    for h in hubs:
        e = h["entry"]
        print(f"[{e.id[:8]}] {e.title}")
        print(f"  Degree: {h['degree']} (in: {h['inbound']}, out: {h['outbound']})")
        print(f"  Topics: {', '.join(h['topics']) if h['topics'] else '(none)'}")
        print()


def cmd_bridges(args):
    archive = MnemosyneArchive()
    bridges = archive.bridge_entries()
    if not bridges:
        print("No bridge entries found.")
        return
    for b in bridges:
        e = b["entry"]
        print(f"[{e.id[:8]}] {e.title}")
        print(f"  Bridges {b['components_after_removal']} components (cluster: {b['cluster_size']} entries)")
        print(f"  Topics: {', '.join(b['topics']) if b['topics'] else '(none)'}")
        print()


def cmd_rebuild(args):
    archive = MnemosyneArchive()
    threshold = args.threshold if args.threshold else None
    total = archive.rebuild_links(threshold=threshold)
    print(f"Rebuilt links: {total} connections across {archive.count} entries")


def main():
    parser = argparse.ArgumentParser(prog="mnemosyne", description="The Living Holographic Archive")
    sub = parser.add_subparsers(dest="command")

    sub.add_parser("stats", help="Show archive statistics")

    s = sub.add_parser("search", help="Search the archive")
    s.add_argument("query", help="Search query")
    s.add_argument("-n", "--limit", type=int, default=10)
    s.add_argument("--semantic", action="store_true", help="Use holographic linker similarity scoring")

    i = sub.add_parser("ingest", help="Ingest a new entry")
    i.add_argument("--title", required=True)
    i.add_argument("--content", required=True)
    i.add_argument("--topics", default="", help="Comma-separated topics")

    l = sub.add_parser("link", help="Show linked entries")
    l.add_argument("entry_id", help="Entry ID (or prefix)")
    l.add_argument("-d", "--depth", type=int, default=1)

    sub.add_parser("topics", help="List all topics with entry counts")

    r = sub.add_parser("remove", help="Remove an entry by ID")
    r.add_argument("entry_id", help="Entry ID to remove")

    ex = sub.add_parser("export", help="Export filtered archive data as JSON")
    ex.add_argument("-q", "--query", default="", help="Keyword filter")
    ex.add_argument("-t", "--topics", default="", help="Comma-separated topic filter")

    cl = sub.add_parser("clusters", help="Show graph clusters (connected components)")
    cl.add_argument("-m", "--min-size", type=int, default=1, help="Minimum cluster size")
    cl.add_argument("-v", "--verbose", action="store_true", help="List entries in each cluster")

    hu = sub.add_parser("hubs", help="Show most connected entries (hub analysis)")
    hu.add_argument("-n", "--limit", type=int, default=10, help="Max hubs to show")

    sub.add_parser("bridges", help="Show bridge entries (articulation points)")

    rb = sub.add_parser("rebuild", help="Recompute all links from scratch")
    rb.add_argument("-t", "--threshold", type=float, default=None, help="Similarity threshold override")

    args = parser.parse_args()
    if not args.command:
        parser.print_help()
        sys.exit(1)

    dispatch = {
        "stats": cmd_stats,
        "search": cmd_search,
        "ingest": cmd_ingest,
        "link": cmd_link,
        "topics": cmd_topics,
        "remove": cmd_remove,
        "export": cmd_export,
        "clusters": cmd_clusters,
        "hubs": cmd_hubs,
        "bridges": cmd_bridges,
        "rebuild": cmd_rebuild,
    }
    dispatch[args.command](args)


if __name__ == "__main__":
    main()
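Because `main()` reads `sys.argv` through argparse, the subcommands can be exercised without a console entry point. A minimal sketch (note it operates on the default archive under ~/.hermes; the patched argv values mirror the parsers defined above):

    import sys

    from nexus.mnemosyne.cli import main

    # Equivalent to: mnemosyne search "holographic" -n 5 --semantic
    sys.argv = ["mnemosyne", "search", "holographic", "-n", "5", "--semantic"]
    main()

    # Equivalent to: mnemosyne stats
    sys.argv = ["mnemosyne", "stats"]
    main()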
nexus/mnemosyne/entry.py (new file, 44 lines)

@@ -0,0 +1,44 @@
"""Archive entry model for Mnemosyne.

Each entry is a node in the holographic graph — a piece of meaning
with metadata, content, and links to related entries.
"""

from __future__ import annotations

from dataclasses import dataclass, field
from datetime import datetime, timezone
from typing import Optional
import uuid


@dataclass
class ArchiveEntry:
    """A single node in the Mnemosyne holographic archive."""

    id: str = field(default_factory=lambda: str(uuid.uuid4()))
    title: str = ""
    content: str = ""
    source: str = ""  # "mempalace", "event", "manual", etc.
    source_ref: Optional[str] = None  # original MemPalace ID, event URI, etc.
    topics: list[str] = field(default_factory=list)
    metadata: dict = field(default_factory=dict)
    created_at: str = field(default_factory=lambda: datetime.now(timezone.utc).isoformat())
    links: list[str] = field(default_factory=list)  # IDs of related entries

    def to_dict(self) -> dict:
        return {
            "id": self.id,
            "title": self.title,
            "content": self.content,
            "source": self.source,
            "source_ref": self.source_ref,
            "topics": self.topics,
            "metadata": self.metadata,
            "created_at": self.created_at,
            "links": self.links,
        }

    @classmethod
    def from_dict(cls, data: dict) -> ArchiveEntry:
        return cls(**{k: v for k, v in data.items() if k in cls.__dataclass_fields__})
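One property of `from_dict` worth calling out: it filters unknown keys against `__dataclass_fields__`, so exports carrying extra fields still load cleanly. A short sketch with an invented field name:

    from nexus.mnemosyne.entry import ArchiveEntry

    e = ArchiveEntry(title="Note", content="Hello", topics=["demo"])
    d = e.to_dict()
    d["some_future_field"] = 123  # hypothetical key; silently dropped by from_dict

    e2 = ArchiveEntry.from_dict(d)
    assert e2.id == e.id and e2.title == "Note"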
nexus/mnemosyne/ingest.py (new file, 62 lines)

@@ -0,0 +1,62 @@
"""Ingestion pipeline — feeds data into the archive.

Supports ingesting from MemPalace, raw events, and manual entries.
"""

from __future__ import annotations

from typing import Optional

from nexus.mnemosyne.archive import MnemosyneArchive
from nexus.mnemosyne.entry import ArchiveEntry


def ingest_from_mempalace(
    archive: MnemosyneArchive,
    mempalace_entries: list[dict],
) -> int:
    """Ingest entries from a MemPalace export.

    Each dict should have at least: content, metadata (optional).
    Returns count of new entries added.
    """
    added = 0
    for mp_entry in mempalace_entries:
        content = mp_entry.get("content", "")
        metadata = mp_entry.get("metadata", {})
        source_ref = mp_entry.get("id", "")

        # Skip if already ingested
        if any(e.source_ref == source_ref for e in archive._entries.values()):
            continue

        entry = ArchiveEntry(
            title=metadata.get("title", content[:80]),
            content=content,
            source="mempalace",
            source_ref=source_ref,
            topics=metadata.get("topics", []),
            metadata=metadata,
        )
        archive.add(entry)
        added += 1
    return added


def ingest_event(
    archive: MnemosyneArchive,
    title: str,
    content: str,
    topics: Optional[list[str]] = None,
    source: str = "event",
    metadata: Optional[dict] = None,
) -> ArchiveEntry:
    """Ingest a single event into the archive."""
    entry = ArchiveEntry(
        title=title,
        content=content,
        source=source,
        topics=topics or [],
        metadata=metadata or {},
    )
    return archive.add(entry)
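Since `ingest_from_mempalace` skips records whose `source_ref` is already present, re-running the same import is idempotent. A sketch with an illustrative record and a throwaway archive path:

    import tempfile
    from pathlib import Path

    from nexus.mnemosyne.archive import MnemosyneArchive
    from nexus.mnemosyne.ingest import ingest_from_mempalace

    with tempfile.TemporaryDirectory() as tmp:
        archive = MnemosyneArchive(archive_path=Path(tmp) / "archive.json")
        records = [
            {"id": "mp-1", "content": "Test memory content", "metadata": {"title": "Test"}},
        ]
        assert ingest_from_mempalace(archive, records) == 1
        assert ingest_from_mempalace(archive, records) == 0  # mp-1 already ingested, skipped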
nexus/mnemosyne/linker.py (new file, 73 lines)

@@ -0,0 +1,73 @@
"""Holographic link engine.

Computes semantic similarity between archive entries and creates
bidirectional links, forming the holographic graph structure.
"""

from __future__ import annotations

from typing import Optional
from nexus.mnemosyne.entry import ArchiveEntry


class HolographicLinker:
    """Links archive entries via semantic similarity.

    Phase 1 uses simple keyword overlap as the similarity metric.
    Phase 2 will integrate ChromaDB embeddings from MemPalace.
    """

    def __init__(self, similarity_threshold: float = 0.15):
        self.threshold = similarity_threshold

    def compute_similarity(self, a: ArchiveEntry, b: ArchiveEntry) -> float:
        """Compute similarity score between two entries.

        Returns float in [0, 1]. Phase 1: Jaccard similarity on
        combined title+content tokens. Phase 2: cosine similarity
        on ChromaDB embeddings.
        """
        tokens_a = self._tokenize(f"{a.title} {a.content}")
        tokens_b = self._tokenize(f"{b.title} {b.content}")
        if not tokens_a or not tokens_b:
            return 0.0
        intersection = tokens_a & tokens_b
        union = tokens_a | tokens_b
        return len(intersection) / len(union)

    def find_links(self, entry: ArchiveEntry, candidates: list[ArchiveEntry]) -> list[tuple[str, float]]:
        """Find entries worth linking to.

        Returns list of (entry_id, similarity_score) tuples above threshold.
        """
        results = []
        for candidate in candidates:
            if candidate.id == entry.id:
                continue
            score = self.compute_similarity(entry, candidate)
            if score >= self.threshold:
                results.append((candidate.id, score))
        results.sort(key=lambda x: x[1], reverse=True)
        return results

    def apply_links(self, entry: ArchiveEntry, candidates: list[ArchiveEntry]) -> int:
        """Auto-link an entry to related entries. Returns count of new links."""
        matches = self.find_links(entry, candidates)
        new_links = 0
        for eid, score in matches:
            if eid not in entry.links:
                entry.links.append(eid)
                new_links += 1
                # Bidirectional
                for c in candidates:
                    if c.id == eid and entry.id not in c.links:
                        c.links.append(entry.id)
        return new_links

    @staticmethod
    def _tokenize(text: str) -> set[str]:
        """Simple whitespace + punctuation tokenizer."""
        import re
        tokens = set(re.findall(r"\w+", text.lower()))
        # Remove very short tokens
        return {t for t in tokens if len(t) > 2}
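How the default 0.15 threshold plays out with the tokenizer above (tokens of three characters or fewer than three are dropped by the `len(t) > 2` filter). The entry strings are illustrative:

    from nexus.mnemosyne.entry import ArchiveEntry
    from nexus.mnemosyne.linker import HolographicLinker

    linker = HolographicLinker()  # similarity_threshold defaults to 0.15

    a = ArchiveEntry(title="Python coding", content="Writing Python scripts")
    b = ArchiveEntry(title="Python scripting", content="Python scripts for tasks")

    # Shared tokens {python, scripts} over a union of 7 tokens ≈ 0.29,
    # so find_links/apply_links would connect these two entries.
    score = linker.compute_similarity(a, b)
    print(score, score >= linker.threshold)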
nexus/mnemosyne/tests/__init__.py (new file, empty)
276
nexus/mnemosyne/tests/test_archive.py
Normal file
276
nexus/mnemosyne/tests/test_archive.py
Normal file
@@ -0,0 +1,276 @@
|
||||
"""Tests for Mnemosyne archive core."""
|
||||
|
||||
import json
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
from nexus.mnemosyne.entry import ArchiveEntry
|
||||
from nexus.mnemosyne.linker import HolographicLinker
|
||||
from nexus.mnemosyne.archive import MnemosyneArchive
|
||||
from nexus.mnemosyne.ingest import ingest_event, ingest_from_mempalace
|
||||
|
||||
|
||||
def test_entry_roundtrip():
|
||||
e = ArchiveEntry(title="Test", content="Hello world", topics=["test"])
|
||||
d = e.to_dict()
|
||||
e2 = ArchiveEntry.from_dict(d)
|
||||
assert e2.id == e.id
|
||||
assert e2.title == "Test"
|
||||
|
||||
|
||||
def test_linker_similarity():
|
||||
linker = HolographicLinker()
|
||||
a = ArchiveEntry(title="Python coding", content="Writing Python scripts for automation")
|
||||
b = ArchiveEntry(title="Python scripting", content="Automating tasks with Python scripts")
|
||||
c = ArchiveEntry(title="Cooking recipes", content="How to make pasta carbonara")
|
||||
assert linker.compute_similarity(a, b) > linker.compute_similarity(a, c)
|
||||
|
||||
|
||||
def test_archive_add_and_search():
|
||||
with tempfile.TemporaryDirectory() as tmp:
|
||||
path = Path(tmp) / "test_archive.json"
|
||||
archive = MnemosyneArchive(archive_path=path)
|
||||
ingest_event(archive, title="First entry", content="Hello archive", topics=["test"])
|
||||
ingest_event(archive, title="Second entry", content="Another record", topics=["test", "demo"])
|
||||
assert archive.count == 2
|
||||
results = archive.search("hello")
|
||||
assert len(results) == 1
|
||||
assert results[0].title == "First entry"
|
||||
|
||||
|
||||
def test_archive_auto_linking():
|
||||
with tempfile.TemporaryDirectory() as tmp:
|
||||
path = Path(tmp) / "test_archive.json"
|
||||
archive = MnemosyneArchive(archive_path=path)
|
||||
e1 = ingest_event(archive, title="Python automation", content="Building automation tools in Python")
|
||||
e2 = ingest_event(archive, title="Python scripting", content="Writing automation scripts using Python")
|
||||
# Both should be linked due to shared tokens
|
||||
assert len(e1.links) > 0 or len(e2.links) > 0
|
||||
|
||||
|
||||
def test_ingest_from_mempalace():
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive = MnemosyneArchive(archive_path=path)
        mp_entries = [
            {"id": "mp-1", "content": "Test memory content", "metadata": {"title": "Test", "topics": ["demo"]}},
            {"id": "mp-2", "content": "Another memory", "metadata": {"title": "Memory 2"}},
        ]
        count = ingest_from_mempalace(archive, mp_entries)
        assert count == 2
        assert archive.count == 2
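One plausible shape for the adapter, inferred purely from the fixture dicts above (the field mapping is an assumption, not the actual ingest.py):

def ingest_from_mempalace(archive, mp_entries) -> int:
    """Hypothetical adapter: map MemPalace dicts onto archive entries."""
    count = 0
    for mp in mp_entries:
        meta = mp.get("metadata", {})
        entry = ArchiveEntry(
            title=meta.get("title", mp.get("id", "untitled")),
            content=mp.get("content", ""),
            topics=meta.get("topics", []),
        )
        archive.add(entry)  # archive.add(...) appears in the graph tests below
        count += 1
    return count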
def test_archive_persistence():
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive1 = MnemosyneArchive(archive_path=path)
        ingest_event(archive1, title="Persistent", content="Should survive reload")

        archive2 = MnemosyneArchive(archive_path=path)
        assert archive2.count == 1
        results = archive2.search("persistent")
        assert len(results) == 1
def test_archive_remove_basic():
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive = MnemosyneArchive(archive_path=path)
        e1 = ingest_event(archive, title="Alpha", content="First entry", topics=["x"])
        assert archive.count == 1

        result = archive.remove(e1.id)
        assert result is True
        assert archive.count == 0
        assert archive.get(e1.id) is None
def test_archive_remove_nonexistent():
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive = MnemosyneArchive(archive_path=path)
        result = archive.remove("does-not-exist")
        assert result is False
def test_archive_remove_cleans_backlinks():
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive = MnemosyneArchive(archive_path=path)
        e1 = ingest_event(archive, title="Python automation", content="Building automation tools in Python")
        e2 = ingest_event(archive, title="Python scripting", content="Writing automation scripts using Python")
        # At least one direction should be linked
        assert e1.id in e2.links or e2.id in e1.links

        # Remove e1; e2 must no longer reference it
        archive.remove(e1.id)
        e2_fresh = archive.get(e2.id)
        assert e2_fresh is not None
        assert e1.id not in e2_fresh.links
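Taken together, the three remove tests pin down the contract: False for unknown ids, inbound links scrubbed, and the result persisted. A minimal sketch satisfying all three (`_entries` and `_save` are names used elsewhere in these tests; the body is otherwise an assumption):

def remove(self, entry_id: str) -> bool:
    """Hypothetical: drop the entry, scrub backlinks, persist."""
    if entry_id not in self._entries:
        return False
    del self._entries[entry_id]
    for other in self._entries.values():
        if entry_id in other.links:
            other.links.remove(entry_id)
    self._save()
    return True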
def test_archive_remove_persists():
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        a1 = MnemosyneArchive(archive_path=path)
        e = ingest_event(a1, title="Gone", content="Will be removed")
        a1.remove(e.id)

        a2 = MnemosyneArchive(archive_path=path)
        assert a2.count == 0
def test_archive_export_unfiltered():
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive = MnemosyneArchive(archive_path=path)
        ingest_event(archive, title="A", content="content a", topics=["alpha"])
        ingest_event(archive, title="B", content="content b", topics=["beta"])
        data = archive.export()
        assert data["count"] == 2
        assert len(data["entries"]) == 2
        assert data["filters"] == {"query": None, "topics": None}
def test_archive_export_by_topic():
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive = MnemosyneArchive(archive_path=path)
        ingest_event(archive, title="A", content="content a", topics=["alpha"])
        ingest_event(archive, title="B", content="content b", topics=["beta"])
        data = archive.export(topics=["alpha"])
        assert data["count"] == 1
        assert data["entries"][0]["title"] == "A"
def test_archive_export_by_query():
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive = MnemosyneArchive(archive_path=path)
        ingest_event(archive, title="Hello world", content="greetings", topics=[])
        ingest_event(archive, title="Goodbye", content="farewell", topics=[])
        data = archive.export(query="hello")
        assert data["count"] == 1
        assert data["entries"][0]["title"] == "Hello world"
def test_archive_export_combined_filters():
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive = MnemosyneArchive(archive_path=path)
        ingest_event(archive, title="Hello world", content="greetings", topics=["alpha"])
        ingest_event(archive, title="Hello again", content="greetings again", topics=["beta"])
        data = archive.export(query="hello", topics=["alpha"])
        assert data["count"] == 1
        assert data["entries"][0]["title"] == "Hello world"
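The three export tests together fix the return shape: a count, serialized entries, and an echo of the filters. A sketch satisfying all of them, assuming the query matches title or content and topics match on intersection:

def export(self, query=None, topics=None) -> dict:
    """Hypothetical: snapshot of entries matching the optional filters."""
    entries = list(self._entries.values())
    if query:
        q = query.lower()
        entries = [e for e in entries
                   if q in e.title.lower() or q in e.content.lower()]
    if topics:
        wanted = set(topics)
        entries = [e for e in entries if wanted & set(e.topics)]
    return {
        "count": len(entries),
        "entries": [e.to_dict() for e in entries],
        "filters": {"query": query, "topics": topics},
    }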
def test_archive_stats_richer():
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive = MnemosyneArchive(archive_path=path)
        # All four new fields present when archive is empty
        s = archive.stats()
        assert "orphans" in s
        assert "link_density" in s
        assert "oldest_entry" in s
        assert "newest_entry" in s
        assert s["orphans"] == 0
        assert s["link_density"] == 0.0
        assert s["oldest_entry"] is None
        assert s["newest_entry"] is None
def test_archive_stats_orphan_count():
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive = MnemosyneArchive(archive_path=path)
        # Two entries with very different content, so auto-linking is unlikely
        ingest_event(archive, title="Zebras", content="Zebra stripes savannah Africa", topics=[])
        ingest_event(archive, title="Compiler", content="Lexer parser AST bytecode", topics=[])
        s = archive.stats()
        # Structural checks only: the exact orphan count depends on the linker
        assert s["orphans"] >= 0
        assert s["link_density"] >= 0.0
        assert s["oldest_entry"] is not None
        assert s["newest_entry"] is not None
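The two stats tests only constrain the empty-archive values and the field names. One sketch consistent with both, reading link_density as average links per entry and assuming a created timestamp on entries (both assumptions):

def stats(self) -> dict:
    """Hypothetical: orphan count, link density, and age bounds."""
    entries = list(self._entries.values())
    n = len(entries)
    total_links = sum(len(e.links) for e in entries)
    return {
        "orphans": sum(1 for e in entries if not e.links),
        "link_density": (total_links / n) if n else 0.0,
        "oldest_entry": min((e.created for e in entries), default=None),
        "newest_entry": max((e.created for e in entries), default=None),
    }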
def test_semantic_search_returns_results():
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive = MnemosyneArchive(archive_path=path)
        ingest_event(archive, title="Python automation", content="Building automation tools in Python")
        ingest_event(archive, title="Cooking recipes", content="How to make pasta carbonara with cheese")
        results = archive.semantic_search("python scripting", limit=5)
        assert len(results) > 0
        assert results[0].title == "Python automation"
def test_semantic_search_link_boost():
    """Entries with more inbound links rank higher when Jaccard is equal."""
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive = MnemosyneArchive(archive_path=path)
        # Create two similar entries
        e1 = ingest_event(archive, title="Machine learning", content="Neural networks deep learning models")
        e2 = ingest_event(archive, title="Machine learning basics", content="Neural networks deep learning intro")
        # Give e1 an extra inbound link via a third entry (e3 -> e1)
        e3 = ingest_event(archive, title="AI overview", content="Artificial intelligence machine learning")
        if e1.id not in e3.links:
            e3.links.append(e1.id)
        archive._save()
        results = archive.semantic_search("machine learning neural networks", limit=5)
        assert len(results) >= 2
        # e1 should rank at or near the top
        assert results[0].id in {e1.id, e2.id}
def test_semantic_search_fallback_to_keyword():
    """Falls back to keyword search when no entry meets the Jaccard threshold."""
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive = MnemosyneArchive(archive_path=path)
        ingest_event(archive, title="Exact match only", content="unique xyzzy token here")
        # threshold=1.0 ensures no semantic match, triggering the fallback
        results = archive.semantic_search("xyzzy", limit=5, threshold=1.0)
        # Fallback keyword search should still find it
        assert len(results) == 1
        assert results[0].title == "Exact match only"
def test_semantic_search_empty_archive():
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive = MnemosyneArchive(archive_path=path)
        results = archive.semantic_search("anything", limit=5)
        assert results == []
def test_semantic_search_vs_keyword_relevance():
    """Semantic search finds conceptually related entries missed by keyword search."""
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive = MnemosyneArchive(archive_path=path)
        ingest_event(archive, title="Python scripting", content="Writing scripts with Python for automation tasks")
        ingest_event(archive, title="Baking bread", content="Mix flour water yeast knead bake oven")
        # "coding" relates to the scripting entry, not to baking
        results = archive.semantic_search("coding scripts automation")
        assert len(results) > 0
        assert results[0].title == "Python scripting"
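Across the five semantic_search tests the visible contract is: token-overlap scoring against the query, an inbound-link boost, a threshold parameter, and a keyword fallback. A sketch that satisfies all of them, reusing the _tokenize helper sketched after the linker hunk; the boost factor and default threshold are invented for illustration:

def semantic_search(self, query, limit=10, threshold=0.05):
    """Hypothetical: Jaccard-score entries against the query tokens,
    boost by inbound links, fall back to keyword search on no match."""
    q_tokens = _tokenize(query)
    inbound = {eid: 0 for eid in self._entries}
    for e in self._entries.values():
        for lid in e.links:
            if lid in inbound:
                inbound[lid] += 1
    scored = []
    for e in self._entries.values():
        tokens = _tokenize(e.title + " " + e.content)
        union = q_tokens | tokens
        jaccard = len(q_tokens & tokens) / len(union) if union else 0.0
        if jaccard >= threshold:
            scored.append((jaccard * (1 + 0.1 * inbound[e.id]), e))
    if not scored:
        return self.search(query, limit=limit)  # keyword fallback
    scored.sort(key=lambda pair: pair[0], reverse=True)
    return [e for _, e in scored[:limit]]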
def test_archive_topic_counts():
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        archive = MnemosyneArchive(archive_path=path)
        ingest_event(archive, title="A", content="x", topics=["python", "automation"])
        ingest_event(archive, title="B", content="y", topics=["python"])
        ingest_event(archive, title="C", content="z", topics=["automation"])
        counts = archive.topic_counts()
        assert counts["python"] == 2
        assert counts["automation"] == 2
        # Sorted by count desc; the two topics tie here, but both must be present
        assert set(counts.keys()) == {"python", "automation"}
271
nexus/mnemosyne/tests/test_graph_clusters.py
Normal file
@@ -0,0 +1,271 @@
"""Tests for Mnemosyne graph cluster analysis features.

Tests: graph_clusters, hub_entries, bridge_entries, rebuild_links.
"""

import tempfile
from pathlib import Path

import pytest

from nexus.mnemosyne.archive import MnemosyneArchive
from nexus.mnemosyne.entry import ArchiveEntry
@pytest.fixture
def archive():
    """Create a fresh archive in a temp directory."""
    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "test_archive.json"
        a = MnemosyneArchive(archive_path=path)
        yield a


def _make_entry(title="Test", content="test content", topics=None):
    return ArchiveEntry(title=title, content=content, topics=topics or [])
class TestGraphClusters:
    """Test graph_clusters() connected component discovery."""

    def test_empty_archive(self, archive):
        clusters = archive.graph_clusters()
        assert clusters == []

    def test_single_orphan(self, archive):
        archive.add(_make_entry("Lone entry"), auto_link=False)
        # min_size=1 includes orphans
        clusters = archive.graph_clusters(min_size=1)
        assert len(clusters) == 1
        assert clusters[0]["size"] == 1
        assert clusters[0]["density"] == 0.0

    def test_single_orphan_filtered(self, archive):
        archive.add(_make_entry("Lone entry"), auto_link=False)
        clusters = archive.graph_clusters(min_size=2)
        assert clusters == []
    def test_two_linked_entries(self, archive):
        """Two manually linked entries form a cluster."""
        e1 = archive.add(_make_entry("Alpha dogs", "canine training"), auto_link=False)
        e2 = archive.add(_make_entry("Beta cats", "feline behavior"), auto_link=False)
        # Manual link
        e1.links.append(e2.id)
        e2.links.append(e1.id)
        archive._save()

        clusters = archive.graph_clusters(min_size=2)
        assert len(clusters) == 1
        assert clusters[0]["size"] == 2
        assert clusters[0]["internal_edges"] == 1
        assert clusters[0]["density"] == 1.0  # 1 edge out of 1 possible
    def test_two_separate_clusters(self, archive):
        """Two disconnected groups form separate clusters."""
        a1 = archive.add(_make_entry("AI models", "neural networks"), auto_link=False)
        a2 = archive.add(_make_entry("AI training", "gradient descent"), auto_link=False)
        b1 = archive.add(_make_entry("Cooking pasta", "italian recipes"), auto_link=False)
        b2 = archive.add(_make_entry("Cooking sauces", "tomato basil"), auto_link=False)

        # Link cluster A
        a1.links.append(a2.id)
        a2.links.append(a1.id)
        # Link cluster B
        b1.links.append(b2.id)
        b2.links.append(b1.id)
        archive._save()

        clusters = archive.graph_clusters(min_size=2)
        assert len(clusters) == 2
        sizes = sorted(c["size"] for c in clusters)
        assert sizes == [2, 2]
    def test_cluster_topics(self, archive):
        """Cluster includes aggregated topics."""
        e1 = archive.add(_make_entry("Alpha", "content", topics=["ai", "models"]), auto_link=False)
        e2 = archive.add(_make_entry("Beta", "content", topics=["ai", "training"]), auto_link=False)
        e1.links.append(e2.id)
        e2.links.append(e1.id)
        archive._save()

        clusters = archive.graph_clusters(min_size=2)
        assert "ai" in clusters[0]["top_topics"]
    def test_density_calculation(self, archive):
        """Triangle (3 nodes, 3 edges) has density 1.0."""
        e1 = archive.add(_make_entry("A", "aaa"), auto_link=False)
        e2 = archive.add(_make_entry("B", "bbb"), auto_link=False)
        e3 = archive.add(_make_entry("C", "ccc"), auto_link=False)
        # Fully connected triangle
        for e, others in [(e1, [e2, e3]), (e2, [e1, e3]), (e3, [e1, e2])]:
            for o in others:
                e.links.append(o.id)
        archive._save()

        clusters = archive.graph_clusters(min_size=2)
        assert len(clusters) == 1
        assert clusters[0]["internal_edges"] == 3
        assert clusters[0]["density"] == 1.0  # 3 edges / 3 possible
    def test_chain_density(self, archive):
        """A-B-C chain has density 2/3 (2 edges out of 3 possible)."""
        e1 = archive.add(_make_entry("A", "aaa"), auto_link=False)
        e2 = archive.add(_make_entry("B", "bbb"), auto_link=False)
        e3 = archive.add(_make_entry("C", "ccc"), auto_link=False)
        # Chain: A-B-C
        e1.links.append(e2.id)
        e2.links.extend([e1.id, e3.id])
        e3.links.append(e2.id)
        archive._save()

        clusters = archive.graph_clusters(min_size=2)
        assert abs(clusters[0]["density"] - 2 / 3) < 0.01
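These seven tests fully determine the cluster dictionary: size, internal_edges, density (edges over n(n-1)/2 possible), and top_topics. A sketch of a BFS connected-components pass that satisfies them, assuming links are effectively bidirectional as the fixtures make them:

from collections import deque

def graph_clusters(self, min_size=2):
    """Hypothetical: BFS over the link graph; density is internal
    edges divided by the n*(n-1)/2 possible edges."""
    entries = self._entries
    seen, clusters = set(), []
    for eid in entries:
        if eid in seen:
            continue
        comp, queue = [], deque([eid])
        seen.add(eid)
        while queue:
            cur = queue.popleft()
            comp.append(cur)
            for nid in entries[cur].links:
                if nid in entries and nid not in seen:
                    seen.add(nid)
                    queue.append(nid)
        if len(comp) < min_size:
            continue
        comp_set = set(comp)
        # Each undirected edge is stored twice (once per endpoint)
        edges = sum(1 for cid in comp for nid in entries[cid].links
                    if nid in comp_set) // 2
        possible = len(comp) * (len(comp) - 1) // 2
        topics = [t for cid in comp for t in entries[cid].topics]
        clusters.append({
            "size": len(comp),
            "internal_edges": edges,
            "density": edges / possible if possible else 0.0,
            "top_topics": sorted(set(topics), key=topics.count, reverse=True)[:5],
        })
    return clusters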
class TestHubEntries:
    """Test hub_entries() degree centrality ranking."""

    def test_empty(self, archive):
        assert archive.hub_entries() == []

    def test_no_links(self, archive):
        archive.add(_make_entry("Lone"), auto_link=False)
        assert archive.hub_entries() == []
    def test_hub_ordering(self, archive):
        """Entry with most links is ranked first."""
        e1 = archive.add(_make_entry("Hub", "central node"), auto_link=False)
        e2 = archive.add(_make_entry("Spoke 1", "content"), auto_link=False)
        e3 = archive.add(_make_entry("Spoke 2", "content"), auto_link=False)
        e4 = archive.add(_make_entry("Spoke 3", "content"), auto_link=False)

        # e1 connects to all spokes
        e1.links.extend([e2.id, e3.id, e4.id])
        e2.links.append(e1.id)
        e3.links.append(e1.id)
        e4.links.append(e1.id)
        archive._save()

        hubs = archive.hub_entries()
        assert len(hubs) == 4
        assert hubs[0]["entry"].id == e1.id
        assert hubs[0]["degree"] == 3
    def test_limit(self, archive):
        e1 = archive.add(_make_entry("A", ""), auto_link=False)
        e2 = archive.add(_make_entry("B", ""), auto_link=False)
        e1.links.append(e2.id)
        e2.links.append(e1.id)
        archive._save()

        assert len(archive.hub_entries(limit=1)) == 1
    def test_inbound_outbound(self, archive):
        """Inbound counts links TO an entry, outbound counts links FROM it."""
        e1 = archive.add(_make_entry("Source", ""), auto_link=False)
        e2 = archive.add(_make_entry("Target", ""), auto_link=False)
        # Only e1 links to e2
        e1.links.append(e2.id)
        archive._save()

        hubs = archive.hub_entries()
        h1 = next(h for h in hubs if h["entry"].id == e1.id)
        h2 = next(h for h in hubs if h["entry"].id == e2.id)
        assert h1["inbound"] == 0
        assert h1["outbound"] == 1
        assert h2["inbound"] == 1
        assert h2["outbound"] == 0
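Note the hub's expected degree is 3, not 6, despite reciprocal links, which suggests degree counts unique neighbors rather than raw link entries. A sketch consistent with that reading; excluding unlinked entries is required by test_no_links:

def hub_entries(self, limit=10):
    """Hypothetical: rank entries by degree, i.e. unique neighbors
    over the undirected link graph; unlinked entries are excluded."""
    neighbors = {eid: set() for eid in self._entries}
    inbound = {eid: 0 for eid in self._entries}
    for e in self._entries.values():
        for lid in e.links:
            if lid in neighbors:
                neighbors[e.id].add(lid)
                neighbors[lid].add(e.id)
                inbound[lid] += 1
    hubs = [{
        "entry": e,
        "degree": len(neighbors[e.id]),
        "inbound": inbound[e.id],
        "outbound": len(e.links),
    } for e in self._entries.values() if neighbors[e.id]]
    hubs.sort(key=lambda h: h["degree"], reverse=True)
    return hubs[:limit]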
class TestBridgeEntries:
    """Test bridge_entries() articulation point detection."""

    def test_empty(self, archive):
        assert archive.bridge_entries() == []
    def test_no_bridges_in_triangle(self, archive):
        """Fully connected triangle has no articulation points."""
        e1 = archive.add(_make_entry("A", ""), auto_link=False)
        e2 = archive.add(_make_entry("B", ""), auto_link=False)
        e3 = archive.add(_make_entry("C", ""), auto_link=False)
        for e, others in [(e1, [e2, e3]), (e2, [e1, e3]), (e3, [e1, e2])]:
            for o in others:
                e.links.append(o.id)
        archive._save()

        assert archive.bridge_entries() == []
    def test_bridge_in_chain(self, archive):
        """A-B-C chain: B is the articulation point."""
        e1 = archive.add(_make_entry("A", ""), auto_link=False)
        e2 = archive.add(_make_entry("B", ""), auto_link=False)
        e3 = archive.add(_make_entry("C", ""), auto_link=False)
        e1.links.append(e2.id)
        e2.links.extend([e1.id, e3.id])
        e3.links.append(e2.id)
        archive._save()

        bridges = archive.bridge_entries()
        assert len(bridges) == 1
        assert bridges[0]["entry"].id == e2.id
        assert bridges[0]["components_after_removal"] == 2
    def test_no_bridges_in_small_cluster(self, archive):
        """Two-node clusters are too small for bridge detection."""
        e1 = archive.add(_make_entry("A", ""), auto_link=False)
        e2 = archive.add(_make_entry("B", ""), auto_link=False)
        e1.links.append(e2.id)
        e2.links.append(e1.id)
        archive._save()

        assert archive.bridge_entries() == []
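Articulation points are classically found with Tarjan's algorithm, but at archive scale a brute-force check (remove each node, recount connected components) also satisfies these tests. A sketch of that simpler approach; the components_after_removal semantics for multi-cluster graphs are an assumption:

def bridge_entries(self):
    """Hypothetical: an entry is a bridge if removing it increases
    the number of connected components among linked entries."""
    ids = set(self._entries)
    adj = {i: set() for i in ids}
    for e in self._entries.values():
        for lid in e.links:
            if lid in ids:
                adj[e.id].add(lid)
                adj[lid].add(e.id)

    def count_components(nodes):
        seen, comps = set(), 0
        for start in nodes:
            if start in seen:
                continue
            comps += 1
            stack = [start]
            seen.add(start)
            while stack:
                cur = stack.pop()
                for n in adj[cur]:
                    if n in nodes and n not in seen:
                        seen.add(n)
                        stack.append(n)
        return comps

    linked = {i for i in ids if adj[i]}
    base = count_components(linked)
    bridges = []
    for i in sorted(linked):
        if len(adj[i]) < 2:
            continue  # leaves can never be articulation points
        after = count_components(linked - {i})
        if after > base:
            bridges.append({"entry": self._entries[i],
                            "components_after_removal": after})
    return bridges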
class TestRebuildLinks:
    """Test rebuild_links() full recomputation."""

    def test_empty_archive(self, archive):
        assert archive.rebuild_links() == 0
    def test_creates_links(self, archive):
        """Rebuild creates links between similar entries."""
        archive.add(_make_entry("Alpha dogs canine training", "obedience training"), auto_link=False)
        archive.add(_make_entry("Beta dogs canine behavior", "behavior training"), auto_link=False)
        archive.add(_make_entry("Cat food feline nutrition", "fish meals"), auto_link=False)

        total = archive.rebuild_links()
        assert total > 0

        # Check that dog entries are linked to each other
        entries = list(archive._entries.values())
        dog_entries = [e for e in entries if "dog" in e.title.lower()]
        assert any(len(e.links) > 0 for e in dog_entries)
    def test_override_threshold(self, archive):
        """Lower threshold creates more links."""
        archive.add(_make_entry("Alpha dogs", "training"), auto_link=False)
        archive.add(_make_entry("Beta cats", "training"), auto_link=False)
        archive.add(_make_entry("Gamma birds", "training"), auto_link=False)

        # Very low threshold = more links
        low_links = archive.rebuild_links(threshold=0.01)

        # Reset
        for e in archive._entries.values():
            e.links = []

        # Higher threshold = fewer links
        high_links = archive.rebuild_links(threshold=0.9)

        assert low_links >= high_links
    def test_rebuild_persists(self, archive):
        """Rebuild saves to disk."""
        archive.add(_make_entry("Alpha dogs", "training"), auto_link=False)
        archive.add(_make_entry("Beta dogs", "training"), auto_link=False)
        archive.rebuild_links()

        # Reload and verify links survived
        archive2 = MnemosyneArchive(archive_path=archive.path)
        entries = list(archive2._entries.values())
        total_links = sum(len(e.links) for e in entries)
        assert total_links > 0
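A sketch of the rebuild contract these tests pin down: zero on an empty archive, links recreated pairwise, an optional threshold override, and a persisted result. The `self.linker` name and its threshold attribute are assumptions:

def rebuild_links(self, threshold=None):
    """Hypothetical: wipe all links, re-run the linker pairwise, save."""
    entries = list(self._entries.values())
    if not entries:
        return 0
    for e in entries:
        e.links = []
    if threshold is not None:
        self.linker.threshold = threshold  # assumed attribute on the linker
    total = 0
    for e in entries:
        total += self.linker.apply_links(e, entries)
    self._save()
    return total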
204
style.css
@@ -1713,3 +1713,207 @@ canvas#nexus-canvas {
  transform: translateX(16px);
  background: #4af0c0;
}

/* ═══ MNEMOSYNE: Memory Inspect Panel (issue #1227) ═══ */
.memory-inspect-panel {
  position: fixed;
  top: 50%;
  right: 20px;
  transform: translateY(-50%) translateX(20px);
  width: 320px;
  max-height: 80vh;
  background: rgba(10, 12, 20, 0.94);
  backdrop-filter: blur(16px);
  -webkit-backdrop-filter: blur(16px);
  border: 1px solid rgba(74, 240, 192, 0.25);
  border-radius: 12px;
  display: flex;
  flex-direction: column;
  z-index: 200;
  opacity: 0;
  transition: opacity 0.25s ease, transform 0.25s ease;
  box-shadow: 0 8px 40px rgba(0, 0, 0, 0.6), inset 0 1px 0 rgba(255, 255, 255, 0.05);
  overflow: hidden;
  pointer-events: none;
}
.memory-inspect-panel.mi-visible {
  opacity: 1;
  transform: translateY(-50%) translateX(0);
  pointer-events: auto;
}

.mi-header {
  display: flex;
  align-items: center;
  gap: 10px;
  padding: 14px 14px 12px;
  border-bottom: 1px solid rgba(74, 240, 192, 0.12);
  flex-shrink: 0;
}
.mi-region-glyph {
  font-size: 20px;
  flex-shrink: 0;
}
.mi-header-text {
  flex: 1;
  min-width: 0;
}
.mi-id {
  color: var(--color-text-bright);
  font-size: 11px;
  font-weight: 600;
  letter-spacing: 0.3px;
  white-space: nowrap;
  overflow: hidden;
  text-overflow: ellipsis;
}
.mi-region {
  font-size: 11px;
  margin-top: 2px;
  letter-spacing: 0.3px;
}
.mi-close {
  background: none;
  border: none;
  color: rgba(255, 255, 255, 0.35);
  font-size: 15px;
  cursor: pointer;
  padding: 2px 6px;
  border-radius: 4px;
  transition: color 0.15s, background 0.15s;
  flex-shrink: 0;
}
.mi-close:hover {
  color: #fff;
  background: rgba(255, 255, 255, 0.1);
}

.mi-body {
  overflow-y: auto;
  padding: 12px 0 8px;
  flex: 1;
}
.mi-body::-webkit-scrollbar { width: 4px; }
.mi-body::-webkit-scrollbar-track { background: transparent; }
.mi-body::-webkit-scrollbar-thumb { background: rgba(74, 240, 192, 0.2); border-radius: 2px; }

.mi-section {
  padding: 6px 16px 10px;
  border-bottom: 1px solid rgba(255, 255, 255, 0.05);
}
.mi-section:last-child { border-bottom: none; }

.mi-section-label {
  color: rgba(74, 240, 192, 0.6);
  font-size: 9px;
  font-weight: 700;
  letter-spacing: 1px;
  margin-bottom: 6px;
}

.mi-content {
  color: var(--color-text);
  font-size: 12px;
  line-height: 1.55;
  white-space: pre-wrap;
  word-break: break-word;
  max-height: 140px;
  overflow-y: auto;
}
.mi-content::-webkit-scrollbar { width: 3px; }
.mi-content::-webkit-scrollbar-thumb { background: rgba(255, 255, 255, 0.15); border-radius: 2px; }

.mi-vitality-row {
  display: flex;
  align-items: center;
  gap: 10px;
}
.mi-vitality-bar-track {
  flex: 1;
  height: 6px;
  background: rgba(255, 255, 255, 0.08);
  border-radius: 3px;
  overflow: hidden;
}
.mi-vitality-bar {
  height: 100%;
  border-radius: 3px;
  transition: width 0.4s ease;
}
.mi-vitality-pct {
  font-size: 11px;
  font-weight: 600;
  flex-shrink: 0;
  width: 34px;
  text-align: right;
}

.mi-links {
  display: flex;
  flex-wrap: wrap;
  gap: 6px;
}
.mi-link-btn {
  background: rgba(123, 92, 255, 0.12);
  border: 1px solid rgba(123, 92, 255, 0.35);
  color: #b8a0ff;
  font-size: 10px;
  padding: 3px 8px;
  border-radius: 4px;
  cursor: pointer;
  font-family: inherit;
  transition: all 0.15s;
  max-width: 200px;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}
.mi-link-btn:hover {
  background: rgba(123, 92, 255, 0.25);
  border-color: #7b5cff;
  color: #fff;
}
.mi-empty {
  color: rgba(255, 255, 255, 0.3);
  font-size: 11px;
  font-style: italic;
}

.mi-meta-row {
  display: flex;
  justify-content: space-between;
  align-items: baseline;
  gap: 8px;
  font-size: 11px;
  margin-bottom: 4px;
}
.mi-meta-key {
  color: rgba(255, 255, 255, 0.4);
  flex-shrink: 0;
}
.mi-meta-val {
  color: var(--color-text);
  text-align: right;
  word-break: break-all;
}

.mi-actions {
  padding: 8px 16px 4px;
  display: flex;
  gap: 8px;
}
.mi-action-btn {
  background: rgba(74, 240, 192, 0.08);
  border: 1px solid rgba(74, 240, 192, 0.25);
  color: #4af0c0;
  font-size: 11px;
  padding: 5px 12px;
  border-radius: 6px;
  cursor: pointer;
  font-family: inherit;
  transition: all 0.15s;
}
.mi-action-btn:hover {
  background: rgba(74, 240, 192, 0.18);
  border-color: #4af0c0;
}