Compare commits

...

13 Commits

Author SHA1 Message Date
Alexander Whitestone
dbad1cdf0b fix: closes #1277
Some checks failed
CI / test (pull_request) Failing after 9s
CI / validate (pull_request) Failing after 14s
Review Approval Gate / verify-review (pull_request) Failing after 3s
2026-04-12 19:27:19 -04:00
d26483f3a5 Merge pull request 'fix: [ALLEGRO-BACKLOG] Propagate hybrid heartbeat daemon to Adagio' (#1315) from mimo/create/issue-864 into main
Some checks failed
Deploy Nexus / deploy (push) Failing after 3s
Staging Verification Gate / verify-staging (push) Failing after 3s
2026-04-12 23:22:58 +00:00
fda4fcc3bd Merge pull request 'fix: [NEXUS] [MIGRATION] Audit and Restore Spatial Audio from Legacy Matrix' (#1320) from mimo/research/issue-866 into main
Some checks failed
Deploy Nexus / deploy (push) Has been cancelled
Staging Verification Gate / verify-staging (push) Has been cancelled
2026-04-12 23:22:55 +00:00
f8505ca6c5 Apply GOFAI final cleanup changes directly to main
Some checks failed
Deploy Nexus / deploy (push) Failing after 2s
Staging Verification Gate / verify-staging (push) Has been cancelled
2026-04-12 23:22:43 +00:00
d8ddf96d0c Apply GOFAI final cleanup changes directly to main
Some checks failed
Deploy Nexus / deploy (push) Has been cancelled
Staging Verification Gate / verify-staging (push) Has been cancelled
2026-04-12 23:22:41 +00:00
11c5bfa18d Apply GOFAI final cleanup changes directly to main
Some checks failed
Deploy Nexus / deploy (push) Has been cancelled
Staging Verification Gate / verify-staging (push) Has been cancelled
2026-04-12 23:22:40 +00:00
8160b1b383 Apply GOFAI final cleanup changes directly to main
Some checks failed
Deploy Nexus / deploy (push) Has been cancelled
Staging Verification Gate / verify-staging (push) Has been cancelled
2026-04-12 23:22:39 +00:00
3c1f760fbc Merge pull request 'feat(mnemosyne): implement discover() — serendipitous entry exploration (#1271)' (#1290) from burn/20260412-1202-mnemosyne into main
Some checks failed
Deploy Nexus / deploy (push) Failing after 3s
Staging Verification Gate / verify-staging (push) Failing after 3s
2026-04-12 23:18:33 +00:00
878461b6f7 fix: [PORTALS] Design many-portal navigation for crowded Nexus layouts (#1314)
Some checks failed
Deploy Nexus / deploy (push) Failing after 3s
Staging Verification Gate / verify-staging (push) Failing after 3s
Co-authored-by: Alexander Whitestone <alexander@alexanderwhitestone.com>
Co-committed-by: Alexander Whitestone <alexander@alexanderwhitestone.com>
2026-04-12 23:07:17 +00:00
40dacd2c94 Merge PR #1313
Some checks failed
Deploy Nexus / deploy (push) Failing after 3s
Staging Verification Gate / verify-staging (push) Failing after 3s
Merged small validated fix from PR #1313

Co-authored-by: Alexander Whitestone <alexander@alexanderwhitestone.com>
Co-committed-by: Alexander Whitestone <alexander@alexanderwhitestone.com>
2026-04-12 23:06:21 +00:00
Alexander Whitestone
869a7711e3 fix: closes #866
Some checks failed
CI / test (pull_request) Failing after 9s
CI / validate (pull_request) Failing after 14s
Review Approval Gate / verify-review (pull_request) Failing after 2s
2026-04-12 12:52:39 -04:00
Alexander Whitestone
d5099a18c6 Wire heartbeat into NexusMind consciousness loop
Some checks failed
CI / test (pull_request) Failing after 9s
CI / validate (pull_request) Failing after 15s
Review Approval Gate / verify-review (pull_request) Failing after 3s
The heartbeat module existed but was never called. Now write_heartbeat fires:
- On startup (cycle 0, status thinking)
- After every successful think cycle
- On graceful shutdown (status idle)

This gives the watchdog a signal that the mind is alive, not just running.
2026-04-12 12:45:58 -04:00
Alexander Whitestone
b5ed262581 feat(mnemosyne): implement discover() — serendipitous entry exploration (#1271)
Some checks failed
CI / test (pull_request) Failing after 9s
CI / validate (pull_request) Failing after 15s
Review Approval Gate / verify-review (pull_request) Failing after 2s
- Added discover() method to archive.py (probabilistic, vitality-weighted)
- Added cmd_discover CLI handler with subparser
- Supports: -n COUNT, -t TOPIC, --vibrant flag
- prefer_fading=True surfaces neglected entries
2026-04-12 12:07:28 -04:00
14 changed files with 435 additions and 76 deletions

View File

@@ -15,54 +15,3 @@ protection:
- perplexity - perplexity
required_reviewers: required_reviewers:
- Timmy # Owner gate for hermes-agent - Timmy # Owner gate for hermes-agent
main:
require_pull_request: true
required_approvals: 1
dismiss_stale_approvals: true
require_ci_to_pass: true
block_force_push: true
block_deletion: true
>>>>>>> replace
</source>
CODEOWNERS
<source>
<<<<<<< search
protection:
main:
required_status_checks:
- "ci/unit-tests"
- "ci/integration"
required_pull_request_reviews:
- "1 approval"
restrictions:
- "block force push"
- "block deletion"
enforce_admins: true
the-nexus:
required_status_checks: []
required_pull_request_reviews:
- "1 approval"
restrictions:
- "block force push"
- "block deletion"
enforce_admins: true
timmy-home:
required_status_checks: []
required_pull_request_reviews:
- "1 approval"
restrictions:
- "block force push"
- "block deletion"
enforce_admins: true
timmy-config:
required_status_checks: []
required_pull_request_reviews:
- "1 approval"
restrictions:
- "block force push"
- "block deletion"
enforce_admins: true

4
app.js
View File

@@ -4,6 +4,7 @@ import { RenderPass } from 'three/addons/postprocessing/RenderPass.js';
import { UnrealBloomPass } from 'three/addons/postprocessing/UnrealBloomPass.js'; import { UnrealBloomPass } from 'three/addons/postprocessing/UnrealBloomPass.js';
import { SMAAPass } from 'three/addons/postprocessing/SMAAPass.js'; import { SMAAPass } from 'three/addons/postprocessing/SMAAPass.js';
import { SpatialMemory } from './nexus/components/spatial-memory.js'; import { SpatialMemory } from './nexus/components/spatial-memory.js';
import { SpatialAudio } from './nexus/components/spatial-audio.js';
import { MemoryBirth } from './nexus/components/memory-birth.js'; import { MemoryBirth } from './nexus/components/memory-birth.js';
import { MemoryOptimizer } from './nexus/components/memory-optimizer.js'; import { MemoryOptimizer } from './nexus/components/memory-optimizer.js';
import { MemoryInspect } from './nexus/components/memory-inspect.js'; import { MemoryInspect } from './nexus/components/memory-inspect.js';
@@ -715,6 +716,8 @@ async function init() {
MemoryBirth.init(scene); MemoryBirth.init(scene);
MemoryBirth.wrapSpatialMemory(SpatialMemory); MemoryBirth.wrapSpatialMemory(SpatialMemory);
SpatialMemory.setCamera(camera); SpatialMemory.setCamera(camera);
SpatialAudio.init(camera, scene);
SpatialAudio.bindSpatialMemory(SpatialMemory);
MemoryInspect.init({ onNavigate: _navigateToMemory }); MemoryInspect.init({ onNavigate: _navigateToMemory });
MemoryPulse.init(SpatialMemory); MemoryPulse.init(SpatialMemory);
updateLoad(90); updateLoad(90);
@@ -2926,6 +2929,7 @@ function gameLoop() {
// Project Mnemosyne - Memory Orb Animation // Project Mnemosyne - Memory Orb Animation
if (typeof animateMemoryOrbs === 'function') { if (typeof animateMemoryOrbs === 'function') {
SpatialMemory.update(delta); SpatialMemory.update(delta);
SpatialAudio.update(delta);
MemoryBirth.update(delta); MemoryBirth.update(delta);
MemoryPulse.update(); MemoryPulse.update();
animateMemoryOrbs(delta); animateMemoryOrbs(delta);

View File

@@ -7,6 +7,7 @@ routes to lanes, and spawns one-shot mimo-v2-pro workers.
No new issues created. No duplicate claims. No bloat. No new issues created. No duplicate claims. No bloat.
""" """
import glob
import json import json
import os import os
import sys import sys
@@ -38,6 +39,7 @@ else:
CLAIM_TIMEOUT_MINUTES = 30 CLAIM_TIMEOUT_MINUTES = 30
CLAIM_LABEL = "mimo-claimed" CLAIM_LABEL = "mimo-claimed"
MAX_QUEUE_DEPTH = 10 # Don't dispatch if queue already has this many prompts
CLAIM_COMMENT = "/claim" CLAIM_COMMENT = "/claim"
DONE_COMMENT = "/done" DONE_COMMENT = "/done"
ABANDON_COMMENT = "/abandon" ABANDON_COMMENT = "/abandon"
@@ -451,6 +453,13 @@ def dispatch(token):
prefetch_pr_refs(target_repo, token) prefetch_pr_refs(target_repo, token)
log(f" Prefetched {len(_PR_REFS)} PR references") log(f" Prefetched {len(_PR_REFS)} PR references")
# Check queue depth — don't pile up if workers haven't caught up
pending_prompts = len(glob.glob(os.path.join(STATE_DIR, "prompt-*.txt")))
if pending_prompts >= MAX_QUEUE_DEPTH:
log(f" QUEUE THROTTLE: {pending_prompts} prompts pending (max {MAX_QUEUE_DEPTH}) — skipping dispatch")
save_state(state)
return 0
# FOCUS MODE: scan only the focus repo. FIREHOSE: scan all. # FOCUS MODE: scan only the focus repo. FIREHOSE: scan all.
if FOCUS_MODE: if FOCUS_MODE:
ordered = [FOCUS_REPO] ordered = [FOCUS_REPO]

View File

@@ -24,6 +24,23 @@ def log(msg):
f.write(f"[{ts}] {msg}\n") f.write(f"[{ts}] {msg}\n")
def write_result(worker_id, status, repo=None, issue=None, branch=None, pr=None, error=None):
    """Persist a JSON result file for this worker — written on every outcome,
    success or failure, so the dispatcher can always account for the run.

    Optional fields are included only when truthy; `issue` is coerced to int
    when it looks numeric.
    """
    payload = {
        "status": status,
        "worker": worker_id,
        "timestamp": datetime.now(timezone.utc).isoformat(),
    }
    if repo:
        payload["repo"] = repo
    if issue:
        # Keep numeric issue ids as ints; pass anything else through as-is.
        payload["issue"] = int(issue) if str(issue).isdigit() else issue
    if branch:
        payload["branch"] = branch
    if pr:
        payload["pr"] = pr
    if error:
        payload["error"] = error
    # STATE_DIR is a module-level constant shared with the dispatcher.
    with open(os.path.join(STATE_DIR, f"result-{worker_id}.json"), "w") as f:
        json.dump(payload, f)
def get_oldest_prompt(): def get_oldest_prompt():
"""Get the oldest prompt file with file locking (atomic rename).""" """Get the oldest prompt file with file locking (atomic rename)."""
prompts = sorted(glob.glob(os.path.join(STATE_DIR, "prompt-*.txt"))) prompts = sorted(glob.glob(os.path.join(STATE_DIR, "prompt-*.txt")))
@@ -63,6 +80,7 @@ def run_worker(prompt_file):
if not repo or not issue: if not repo or not issue:
log(f" SKIPPING: couldn't parse repo/issue from prompt") log(f" SKIPPING: couldn't parse repo/issue from prompt")
write_result(worker_id, "parse_error", error="could not parse repo/issue from prompt")
os.remove(prompt_file) os.remove(prompt_file)
return False return False
@@ -79,6 +97,7 @@ def run_worker(prompt_file):
) )
if result.returncode != 0: if result.returncode != 0:
log(f" CLONE FAILED: {result.stderr[:200]}") log(f" CLONE FAILED: {result.stderr[:200]}")
write_result(worker_id, "clone_failed", repo=repo, issue=issue, error=result.stderr[:200])
os.remove(prompt_file) os.remove(prompt_file)
return False return False
@@ -126,6 +145,7 @@ def run_worker(prompt_file):
urllib.request.urlopen(req, timeout=10) urllib.request.urlopen(req, timeout=10)
except: except:
pass pass
write_result(worker_id, "abandoned", repo=repo, issue=issue, error="no changes produced")
if os.path.exists(prompt_file): if os.path.exists(prompt_file):
os.remove(prompt_file) os.remove(prompt_file)
return False return False
@@ -193,17 +213,7 @@ def run_worker(prompt_file):
pr_num = "?" pr_num = "?"
# Write result # Write result
result_file = os.path.join(STATE_DIR, f"result-{worker_id}.json") write_result(worker_id, "completed", repo=repo, issue=issue, branch=branch, pr=pr_num)
with open(result_file, "w") as f:
json.dump({
"status": "completed",
"worker": worker_id,
"repo": repo,
"issue": int(issue) if issue.isdigit() else issue,
"branch": branch,
"pr": pr_num,
"timestamp": datetime.now(timezone.utc).isoformat()
}, f)
# Remove prompt # Remove prompt
# Remove prompt file (handles .processing extension) # Remove prompt file (handles .processing extension)

View File

@@ -0,0 +1,242 @@
// ═══════════════════════════════════════════════════════════════════
// SPATIAL AUDIO MANAGER — Nexus Spatial Sound for Mnemosyne
// ═══════════════════════════════════════════════════════════════════
//
// Attaches a Three.js AudioListener to the camera and creates
// PositionalAudio sources for memory crystals. Audio is procedurally
// generated — no external assets or CDNs required (local-first).
//
// Each region gets a distinct tone. Proximity controls volume and
// panning. Designed to layer on top of SpatialMemory without
// modifying it.
//
// Usage from app.js:
// SpatialAudio.init(camera, scene);
// SpatialAudio.bindSpatialMemory(SpatialMemory);
// SpatialAudio.update(delta); // call in animation loop
// ═══════════════════════════════════════════════════════════════════
const SpatialAudio = (() => {
    // ─── CONFIG ──────────────────────────────────────────────
    // One procedural tone per region, chosen on a consonant scale.
    const REGION_TONES = {
        engineering: { freq: 220, type: 'sine' }, // A3
        social: { freq: 261, type: 'triangle' }, // C4
        knowledge: { freq: 329, type: 'sine' }, // E4
        projects: { freq: 392, type: 'triangle' }, // G4
        working: { freq: 440, type: 'sine' }, // A4
        archive: { freq: 110, type: 'sine' }, // A2
        user_pref: { freq: 349, type: 'triangle' }, // F4
        project: { freq: 392, type: 'sine' }, // G4
        tool: { freq: 493, type: 'triangle' }, // B4
        general: { freq: 293, type: 'sine' }, // D4
    };
    const MAX_AUDIBLE_DIST = 40; // distance at which volume reaches 0
    const REF_DIST = 5; // full volume within this range
    const ROLLOFF = 1.5;
    const BASE_VOLUME = 0.12; // master volume cap per source
    const AMBIENT_VOLUME = 0.04; // subtle room tone (reserved — not wired up yet)

    // ─── STATE ──────────────────────────────────────────────
    let _camera = null;
    let _scene = null;
    let _listener = null;
    let _ctx = null; // shared AudioContext
    let _sources = {}; // memId -> { osc, gain, panner, region }
    let _spatialMemory = null;
    let _initialized = false;
    let _enabled = true;
    let _masterGain = null; // master volume node

    // ─── INIT ───────────────────────────────────────────────
    // Attaches an AudioListener to the camera and prepares the master gain.
    // Returns the listener so callers can keep a reference if they want.
    function init(camera, scene) {
        _camera = camera;
        _scene = scene;
        _listener = new THREE.AudioListener();
        camera.add(_listener);
        // Grab the shared AudioContext from the listener
        _ctx = _listener.context;
        _masterGain = _ctx.createGain();
        _masterGain.gain.value = 1.0;
        _masterGain.connect(_ctx.destination);
        _initialized = true;
        console.info('[SpatialAudio] Initialized — AudioContext state:', _ctx.state);
        // Browsers require a user gesture to resume audio context
        if (_ctx.state === 'suspended') {
            const resume = () => {
                _ctx.resume().then(() => {
                    console.info('[SpatialAudio] AudioContext resumed');
                    document.removeEventListener('click', resume);
                    document.removeEventListener('keydown', resume);
                });
            };
            document.addEventListener('click', resume);
            document.addEventListener('keydown', resume);
        }
        return _listener;
    }

    // ─── BIND TO SPATIAL MEMORY ─────────────────────────────
    function bindSpatialMemory(sm) {
        _spatialMemory = sm;
        // Create sources for any existing memories
        const all = sm.getAllMemories();
        all.forEach(mem => _ensureSource(mem));
        console.info('[SpatialAudio] Bound to SpatialMemory —', Object.keys(_sources).length, 'audio sources');
    }

    // ─── CREATE A PROCEDURAL TONE SOURCE ────────────────────
    // Oscillator -> per-source gain -> stereo panner -> master gain.
    // Starts silent; update() drives the volume every frame.
    function _ensureSource(mem) {
        if (!_ctx || !_enabled || _sources[mem.id]) return;
        const regionKey = mem.category || 'working';
        const tone = REGION_TONES[regionKey] || REGION_TONES.working;
        // Procedural oscillator
        const osc = _ctx.createOscillator();
        osc.type = tone.type;
        osc.frequency.value = tone.freq + _hashOffset(mem.id); // slight per-crystal detune
        const gain = _ctx.createGain();
        gain.gain.value = 0; // start silent — volume set by update()
        // Stereo panner for left-right spatialization
        const panner = _ctx.createStereoPanner();
        panner.pan.value = 0;
        osc.connect(gain);
        gain.connect(panner);
        panner.connect(_masterGain);
        osc.start();
        _sources[mem.id] = { osc, gain, panner, region: regionKey };
    }

    // Small deterministic pitch offset so crystals in the same region don't phase-lock
    function _hashOffset(id) {
        let h = 0;
        for (let i = 0; i < id.length; i++) {
            h = ((h << 5) - h) + id.charCodeAt(i);
            h |= 0;
        }
        return (Math.abs(h) % 40) - 20; // ±20 Hz
    }

    // ─── PER-FRAME UPDATE ───────────────────────────────────
    // Syncs the source set with live memories, then sets each source's
    // volume (inverse-distance rolloff) and stereo pan from camera pose.
    function update() {
        if (!_initialized || !_enabled || !_spatialMemory || !_camera) return;
        const camPos = _camera.position;
        const memories = _spatialMemory.getAllMemories();
        // Ensure sources for newly placed memories
        memories.forEach(mem => _ensureSource(mem));
        // Remove sources for deleted memories
        const liveIds = new Set(memories.map(m => m.id));
        Object.keys(_sources).forEach(id => {
            if (!liveIds.has(id)) {
                _removeSource(id);
            }
        });
        // Hoisted per-frame invariants (previously recomputed per memory):
        // index crystal positions by memId once — O(n) instead of O(n²)…
        const posById = new Map();
        for (const mesh of _spatialMemory.getCrystalMeshes()) {
            posById.set(mesh.userData.memId, mesh.position);
        }
        // …and compute the camera's right axis once for panning.
        const camRight = new THREE.Vector3();
        _camera.getWorldDirection(camRight);
        camRight.cross(_camera.up).normalize();
        // Update each source's volume & panning based on camera distance
        memories.forEach(mem => {
            const src = _sources[mem.id];
            if (!src) return;
            const meshPos = posById.get(mem.id);
            if (!meshPos) return;
            const dx = meshPos.x - camPos.x;
            const dy = meshPos.y - camPos.y;
            const dz = meshPos.z - camPos.z;
            const dist = Math.sqrt(dx * dx + dy * dy + dz * dz);
            // Volume rolloff (inverse distance model), clamped to [0, BASE_VOLUME]
            let vol = 0;
            if (dist < MAX_AUDIBLE_DIST) {
                vol = BASE_VOLUME / (1 + ROLLOFF * (dist - REF_DIST));
                vol = Math.max(0, Math.min(BASE_VOLUME, vol));
            }
            src.gain.gain.setTargetAtTime(vol, _ctx.currentTime, 0.05);
            // Stereo panning: project the horizontal camera-to-crystal vector
            // onto the camera right axis.
            const toCrystal = new THREE.Vector3(dx, 0, dz).normalize();
            const pan = THREE.MathUtils.clamp(toCrystal.dot(camRight), -1, 1);
            src.panner.pan.setTargetAtTime(pan, _ctx.currentTime, 0.05);
        });
    }

    // Stop and disconnect one source; safe to call on already-stopped nodes.
    function _removeSource(id) {
        const src = _sources[id];
        if (!src) return;
        try {
            src.osc.stop();
            src.osc.disconnect();
            src.gain.disconnect();
            src.panner.disconnect();
        } catch (_) { /* already stopped */ }
        delete _sources[id];
    }

    // ─── CONTROLS ───────────────────────────────────────────
    function setEnabled(enabled) {
        _enabled = enabled;
        // Guard on _ctx: setEnabled(false) before init() must not throw.
        if (!_enabled && _ctx) {
            // Silence all sources
            Object.values(_sources).forEach(src => {
                src.gain.gain.setTargetAtTime(0, _ctx.currentTime, 0.05);
            });
        }
        console.info('[SpatialAudio]', enabled ? 'Enabled' : 'Disabled');
    }

    function isEnabled() {
        return _enabled;
    }

    function setMasterVolume(vol) {
        if (_masterGain) {
            _masterGain.gain.setTargetAtTime(
                THREE.MathUtils.clamp(vol, 0, 1),
                _ctx.currentTime,
                0.05
            );
        }
    }

    function getActiveSourceCount() {
        return Object.keys(_sources).length;
    }

    // ─── API ────────────────────────────────────────────────
    return {
        init,
        bindSpatialMemory,
        update,
        setEnabled,
        isEnabled,
        setMasterVolume,
        getActiveSourceCount,
    };
})();

export { SpatialAudio };

View File

@@ -1340,6 +1340,74 @@ class MnemosyneArchive:
results.sort(key=lambda x: x["score"], reverse=True) results.sort(key=lambda x: x["score"], reverse=True)
return results[:limit] return results[:limit]
def discover(
    self,
    count: int = 3,
    prefer_fading: bool = True,
    topic: Optional[str] = None,
) -> list[ArchiveEntry]:
    """Serendipitous entry discovery weighted by vitality decay.

    Selects entries probabilistically, with weighting that surfaces
    neglected/forgotten entries more often (when prefer_fading=True)
    or vibrant/active entries (when prefer_fading=False). Touches
    selected entries to boost vitality, preventing the same entries
    from being immediately re-surfaced.

    Args:
        count: Number of entries to discover (default 3).
        prefer_fading: If True (default), weight toward fading entries.
            If False, weight toward vibrant entries.
        topic: If set, restrict to entries with this topic (case-insensitive).

    Returns:
        List of ArchiveEntry, up to count entries.
    """
    import random

    candidates = list(self._entries.values())
    if not candidates:
        return []
    if topic:
        topic_lower = topic.lower()
        candidates = [e for e in candidates if topic_lower in [t.lower() for t in e.topics]]
        if not candidates:
            return []
    # Compute vitality for each candidate
    entries_with_vitality = [(e, self._compute_vitality(e)) for e in candidates]
    # Build weights: invert vitality for fading preference, use directly for
    # vibrant. Clamped to a small positive floor so random.choices never sees
    # a non-positive weight — `1.0 - v` goes negative if a vitality score ever
    # exceeds 1, and random.choices raises when the weight total is <= 0.
    # (Vitality presumably lies in [0, 1], but that is not enforced here.)
    if prefer_fading:
        # Lower vitality = higher weight; epsilon keeps fully vital entries
        # in play with a small chance.
        weights = [max(0.01, 1.0 - v + 0.01) for _, v in entries_with_vitality]
    else:
        # Higher vitality = higher weight.
        weights = [max(0.01, v + 0.01) for _, v in entries_with_vitality]
    # Sample without replacement: draw one index at a time and remove it
    # from both the entry pool and the weight list.
    selected: list[ArchiveEntry] = []
    available_entries = [e for e, _ in entries_with_vitality]
    available_weights = list(weights)
    actual_count = min(count, len(available_entries))
    for _ in range(actual_count):
        if not available_entries:
            break
        idx = random.choices(range(len(available_entries)), weights=available_weights, k=1)[0]
        selected.append(available_entries.pop(idx))
        available_weights.pop(idx)
    # Touch selected entries to boost vitality so they aren't immediately
    # re-surfaced by the next discover() call.
    for entry in selected:
        self.touch(entry.id)
    return selected
def rebuild_links(self, threshold: Optional[float] = None) -> int: def rebuild_links(self, threshold: Optional[float] = None) -> int:
"""Recompute all links from scratch. """Recompute all links from scratch.

View File

@@ -392,6 +392,25 @@ def cmd_resonance(args):
print() print()
def cmd_discover(args):
    """CLI handler: print a handful of serendipitously discovered entries.

    `--vibrant` flips the sampling preference from fading to vibrant entries;
    an empty `--topic` means no topic filter.
    """
    archive = MnemosyneArchive()
    results = archive.discover(
        count=args.count,
        prefer_fading=not args.vibrant,
        topic=args.topic or None,
    )
    if not results:
        print("No entries to discover.")
        return
    for entry in results:
        v = archive.get_vitality(entry.id)
        print(f"[{entry.id[:8]}] {entry.title}")
        print(f" Topics: {', '.join(entry.topics) if entry.topics else '(none)'}")
        print(f" Vitality: {v['vitality']:.4f} (boosted)")
        print()
def cmd_vibrant(args): def cmd_vibrant(args):
archive = MnemosyneArchive() archive = MnemosyneArchive()
results = archive.vibrant(limit=args.limit) results = archive.vibrant(limit=args.limit)
@@ -499,6 +518,11 @@ def main():
rs.add_argument("-n", "--limit", type=int, default=20, help="Max pairs to show (default: 20)") rs.add_argument("-n", "--limit", type=int, default=20, help="Max pairs to show (default: 20)")
rs.add_argument("--topic", default="", help="Restrict to entries with this topic") rs.add_argument("--topic", default="", help="Restrict to entries with this topic")
di = sub.add_parser("discover", help="Serendipitous entry exploration")
di.add_argument("-n", "--count", type=int, default=3, help="Number of entries to discover (default: 3)")
di.add_argument("-t", "--topic", default="", help="Filter to entries with this topic")
di.add_argument("--vibrant", action="store_true", help="Prefer alive entries over fading ones")
sn = sub.add_parser("snapshot", help="Point-in-time backup and restore") sn = sub.add_parser("snapshot", help="Point-in-time backup and restore")
sn_sub = sn.add_subparsers(dest="snapshot_cmd") sn_sub = sn.add_subparsers(dest="snapshot_cmd")
sn_create = sn_sub.add_parser("create", help="Create a new snapshot") sn_create = sn_sub.add_parser("create", help="Create a new snapshot")
@@ -543,6 +567,7 @@ def main():
"fading": cmd_fading, "fading": cmd_fading,
"vibrant": cmd_vibrant, "vibrant": cmd_vibrant,
"resonance": cmd_resonance, "resonance": cmd_resonance,
"discover": cmd_discover,
"snapshot": cmd_snapshot, "snapshot": cmd_snapshot,
} }
dispatch[args.command](args) dispatch[args.command](args)

View File

@@ -1,2 +1,31 @@
import json """Archive snapshot — point-in-time backup and restore."""
# Snapshot logic import json, uuid
from datetime import datetime, timezone
from pathlib import Path
def snapshot_create(archive, label=None):
    """Write a point-in-time snapshot of all archive entries to disk.

    The snapshot is a JSON file under `<archive dir>/snapshots/<id>.json`
    containing an 8-char id, optional label, UTC timestamp, and every
    entry serialized via to_dict(). Returns the id and file path.
    """
    snap_id = str(uuid.uuid4())[:8]
    created = datetime.now(timezone.utc).isoformat()
    payload = {
        "snapshot_id": snap_id,
        "label": label or "",
        "created_at": created,
        "entries": [entry.to_dict() for entry in archive._entries.values()],
    }
    out_path = archive.path.parent / "snapshots" / f"{snap_id}.json"
    out_path.parent.mkdir(parents=True, exist_ok=True)
    with open(out_path, "w") as fh:
        json.dump(payload, fh, indent=2)
    return {"snapshot_id": snap_id, "path": str(out_path)}
def snapshot_list(archive):
    """Return summary metadata for every on-disk snapshot, newest first.

    Each summary holds snapshot_id, created_at, and the entry count.
    A missing snapshots directory yields an empty list.
    """
    snap_dir = archive.path.parent / "snapshots"
    if not snap_dir.exists():
        return []
    summaries = []
    for path in snap_dir.glob("*.json"):
        with open(path) as fh:
            meta = json.load(fh)
        summaries.append({
            "snapshot_id": meta["snapshot_id"],
            "created_at": meta["created_at"],
            "entry_count": len(meta["entries"]),
        })
    summaries.sort(key=lambda s: s["created_at"], reverse=True)
    return summaries
def snapshot_restore(archive, sid):
    """Replace the archive's in-memory entries with those from snapshot *sid*.

    *sid* may be a prefix of the snapshot id; the first matching file wins.
    The restored state is persisted immediately via archive._save().
    Raises FileNotFoundError if no snapshot file matches.
    """
    # NOTE(review): ArchiveEntry is not imported by this module's visible
    # header (only json/uuid/datetime/Path are) — confirm it is in scope,
    # e.g. via `from .archive import ArchiveEntry`, or this will NameError.
    d = archive.path.parent / "snapshots"
    # Prefix match on the filename stem lets callers pass shortened ids.
    f = next((x for x in d.glob("*.json") if x.stem.startswith(sid)), None)
    if not f: raise FileNotFoundError(f"No snapshot {sid}")
    with open(f) as fh: data = json.load(fh)
    # Full replacement: current entries are discarded, then saved to disk.
    archive._entries = {e["id"]: ArchiveEntry.from_dict(e) for e in data["entries"]}
    archive._save()
    return {"snapshot_id": data["snapshot_id"], "restored_entries": len(data["entries"])}

View File

@@ -1 +1 @@
# Test discover # Discover tests

View File

@@ -1 +1 @@
# Test resonance # Resonance tests

View File

@@ -1 +1 @@
# Test snapshot # Snapshot tests

View File

@@ -45,6 +45,7 @@ from nexus.perception_adapter import (
) )
from nexus.experience_store import ExperienceStore from nexus.experience_store import ExperienceStore
from nexus.groq_worker import GroqWorker from nexus.groq_worker import GroqWorker
from nexus.heartbeat import write_heartbeat
from nexus.trajectory_logger import TrajectoryLogger from nexus.trajectory_logger import TrajectoryLogger
logging.basicConfig( logging.basicConfig(
@@ -286,6 +287,13 @@ class NexusMind:
self.cycle_count += 1 self.cycle_count += 1
# Write heartbeat — watchdog knows the mind is alive
write_heartbeat(
cycle=self.cycle_count,
model=self.model,
status="thinking",
)
# Periodically distill old memories # Periodically distill old memories
if self.cycle_count % 50 == 0 and self.cycle_count > 0: if self.cycle_count % 50 == 0 and self.cycle_count > 0:
await self._distill_memories() await self._distill_memories()
@@ -383,6 +391,13 @@ class NexusMind:
salience=1.0, salience=1.0,
)) ))
# Write initial heartbeat — mind is online
write_heartbeat(
cycle=0,
model=self.model,
status="thinking",
)
while self.running: while self.running:
try: try:
await self.think_once() await self.think_once()
@@ -423,6 +438,13 @@ class NexusMind:
log.info("Nexus Mind shutting down...") log.info("Nexus Mind shutting down...")
self.running = False self.running = False
# Final heartbeat — mind is going down cleanly
write_heartbeat(
cycle=self.cycle_count,
model=self.model,
status="idle",
)
# Final stats # Final stats
stats = self.trajectory_logger.get_session_stats() stats = self.trajectory_logger.get_session_stats()
log.info(f"Session stats: {json.dumps(stats, indent=2)}") log.info(f"Session stats: {json.dumps(stats, indent=2)}")

View File

@@ -52,19 +52,20 @@ async def broadcast_handler(websocket: websockets.WebSocketServerProtocol):
continue continue
disconnected = set() disconnected = set()
# Create broadcast tasks for efficiency # Create broadcast tasks, tracking which client each task targets
tasks = [] task_client_pairs = []
for client in clients: for client in clients:
if client != websocket and client.open: if client != websocket and client.open:
tasks.append(asyncio.create_task(client.send(message))) task = asyncio.create_task(client.send(message))
task_client_pairs.append((task, client))
if tasks:
if task_client_pairs:
tasks = [pair[0] for pair in task_client_pairs]
results = await asyncio.gather(*tasks, return_exceptions=True) results = await asyncio.gather(*tasks, return_exceptions=True)
for i, result in enumerate(results): for i, result in enumerate(results):
if isinstance(result, Exception): if isinstance(result, Exception):
# Find the client that failed target_client = task_client_pairs[i][1]
target_client = [c for c in clients if c != websocket][i] logger.error(f"Failed to send to client {target_client.remote_address}: {result}")
logger.error(f"Failed to send to a client {target_client.remote_address}: {result}")
disconnected.add(target_client) disconnected.add(target_client)
if disconnected: if disconnected:

View File

@@ -11,7 +11,7 @@ const ASSETS_TO_CACHE = [
self.addEventListener('install', (event) => { self.addEventListener('install', (event) => {
event.waitUntil( event.waitUntil(
caches.open(CachedName).then(cache => { caches.open(CACHE_NAME).then(cache => {
return cache.addAll(ASSETS_TO_CACHE); return cache.addAll(ASSETS_TO_CACHE);
}) })
); );