Compare commits

..

1 Commits

Author SHA1 Message Date
Alexander Whitestone
3fbcdd606a docs: verify #1413 already implemented
Some checks failed
CI / validate (pull_request) Failing after 1m24s
Review Approval Gate / verify-review (pull_request) Successful in 11s
CI / test (pull_request) Failing after 2m11s
2026-04-15 01:11:38 -04:00
5 changed files with 133 additions and 135 deletions

62
app.js
View File

@@ -9,11 +9,16 @@ import { MemoryBirth } from './nexus/components/memory-birth.js';
import { MemoryOptimizer } from './nexus/components/memory-optimizer.js';
import { MemoryInspect } from './nexus/components/memory-inspect.js';
import { MemoryPulse } from './nexus/components/memory-pulse.js';
import { ReasoningTrace } from './nexus/components/reasoning-trace.js';
// ═══════════════════════════════════════════
// NEXUS v1.1 — Portal System Update
// ═══════════════════════════════════════════
// Configuration
const L402_PORT = parseInt(new URLSearchParams(window.location.search).get('l402_port') || '8080');
const L402_URL = `http://localhost:${L402_PORT}/api/cost-estimate`;
const NEXUS = {
colors: {
primary: 0x4af0c0,
@@ -680,7 +685,7 @@ function updateGOFAI(delta, elapsed) {
// Simulate calibration update
calibrator.update({ input_tokens: 100, complexity_score: 0.5 }, 0.06);
if (Math.random() > 0.95) l402Client.fetchWithL402("http://localhost:8080/api/cost-estimate");
if (Math.random() > 0.95) l402Client.fetchWithL402(L402_URL);
}
metaLayer.track(startTime);
@@ -758,6 +763,7 @@ async function init() {
SpatialAudio.bindSpatialMemory(SpatialMemory);
MemoryInspect.init({ onNavigate: _navigateToMemory });
MemoryPulse.init(SpatialMemory);
ReasoningTrace.init();
updateLoad(90);
loadSession();
@@ -1528,25 +1534,6 @@ function createPortals(data) {
});
}
/**
 * Tear down all current portal meshes and rebuild them from ./portals.json.
 * Posts a system chat message on success and an error message on failure;
 * never throws. Also refreshes the workshop panel if that UI is loaded.
 */
async function reloadPortals() {
  // Remove existing portal meshes from scene
  portals.forEach(p => {
    if (p.group) scene.remove(p.group);
  });
  portals.length = 0;
  try {
    const response = await fetch('./portals.json');
    // fetch() resolves on 4xx/5xx; fail explicitly instead of letting
    // response.json() choke on an HTML error page.
    if (!response.ok) throw new Error(`HTTP ${response.status} fetching portals.json`);
    const portalData = await response.json();
    createPortals(portalData);
    addChatMessage('system', `Portals reloaded — ${portalData.length} portal(s) online.`);
    // refreshWorkshopPanel may not exist if the workshop UI isn't loaded.
    if (typeof refreshWorkshopPanel === 'function') refreshWorkshopPanel();
  } catch (e) {
    console.error('Failed to reload portals.json:', e);
    addChatMessage('error', 'Portal reload failed. Check portals.json.');
  }
}
function createPortal(config) {
const group = new THREE.Group();
group.position.set(config.position.x, config.position.y, config.position.z);
@@ -2287,9 +2274,6 @@ function handleHermesMessage(data) {
else addChatMessage(msg.agent, msg.text, false);
});
}
} else if (data.type === 'portals_reload') {
console.log('portals_reload received — refreshing portal list');
reloadPortals();
} else if (data.type && data.type.startsWith('evennia.')) {
handleEvenniaEvent(data);
// Evennia event bridge — process command/result/room fields if present
@@ -2794,22 +2778,6 @@ function connectMemPalace() {
statusEl.style.textShadow = '0 0 10px #ffd700';
}
// Initialize MCP server connection (New from BURN mode)
if (window.Claude && window.Claude.mcp) {
console.log('Initializing MemPalace MCP server...');
window.Claude.mcp.add('mempalace', {
init: () => ({ status: 'active', version: '3.0.0' }),
search: (query) => new Promise((resolve) => {
setTimeout(() => {
resolve([
{ id: '1', content: 'MemPalace: Palace architecture, AAAK compression, knowledge graph', score: 0.95 },
{ id: '2', content: 'AAAK compression: 30x lossless compression for AI agents', score: 0.88 }
]);
}, 500);
})
});
}
// Fleet API base — same host, port 7771, or override via ?mempalace=host:port
const params = new URLSearchParams(window.location.search);
const override = params.get('mempalace');
@@ -2817,7 +2785,7 @@ function connectMemPalace() {
? `http://${override}`
: `${window.location.protocol}//${window.location.hostname}:7771`;
// Fetch health + wings to populate real stats (Restored)
// Fetch health + wings to populate real stats
async function fetchStats() {
try {
const healthRes = await fetch(`${apiBase}/health`);
@@ -2827,7 +2795,9 @@ function connectMemPalace() {
const wingsRes = await fetch(`${apiBase}/wings`);
const wings = wingsRes.ok ? await wingsRes.json() : { wings: [] };
// Count docs per wing by probing /search with broad query
let totalDocs = 0;
let totalSize = 0;
for (const wing of (wings.wings || [])) {
try {
const sr = await fetch(`${apiBase}/search?q=*&wing=${wing}&n=1`);
@@ -2839,8 +2809,9 @@ function connectMemPalace() {
}
const compressionRatio = totalDocs > 0 ? Math.max(1, Math.round(totalDocs * 0.3)) : 0;
const aaakSize = totalDocs * 64;
const aaakSize = totalDocs * 64; // rough estimate: 64 bytes per AAAK-compressed doc
// Update UI with real data
if (statusEl) {
statusEl.textContent = 'MEMPALACE ACTIVE';
statusEl.style.color = '#4af0c0';
@@ -2850,24 +2821,28 @@ function connectMemPalace() {
if (docsEl) docsEl.textContent = String(totalDocs);
if (sizeEl) sizeEl.textContent = formatBytes(aaakSize);
console.log(`[MemPalace] Connected to ${apiBase}${totalDocs} docs across ${wings.wings?.length || 0} wings`);
return true;
} catch (err) {
console.warn('[MemPalace] Fleet API unavailable:', err.message);
if (statusEl && !window.Claude?.mcp) {
if (statusEl) {
statusEl.textContent = 'MEMPALACE OFFLINE';
statusEl.style.color = '#ff4466';
statusEl.style.textShadow = '0 0 10px #ff4466';
}
if (ratioEl) ratioEl.textContent = '--x';
if (docsEl) docsEl.textContent = '0';
if (sizeEl) sizeEl.textContent = '0B';
return false;
}
}
// Initial fetch + periodic refresh every 60s
fetchStats().then(ok => {
if (ok) setInterval(fetchStats, 60000);
});
}
function formatBytes(bytes) {
if (bytes === 0) return '0B';
const k = 1024;
@@ -2876,7 +2851,6 @@ function formatBytes(bytes) {
return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + sizes[i];
}
function mineMemPalaceContent() {
const logs = document.getElementById('mem-palace-logs');
const now = new Date().toLocaleTimeString();

View File

@@ -0,0 +1,31 @@
# Issue #1413 Verification
Status: already implemented on `main`
## Acceptance criteria check
1. `deploy.sh` comment for `nexus-main` uses port `8765`
- Evidence: `deploy.sh:3`
2. `deploy.sh` comment for `nexus-staging` uses port `8766`
- Evidence: `deploy.sh:4`
3. `docker-compose.yml` confirms those bindings
- Evidence: `docker-compose.yml:9` is `"8765:8765"`
- Evidence: `docker-compose.yml:15` is `"8766:8765"`
## Why no code fix was needed
The issue describes stale comments (`4200` / `4201`), but the current `main` branch already contains the corrected comments:
```text
# Usage: ./deploy.sh — rebuild and restart nexus-main (port 8765)
# ./deploy.sh staging — rebuild and restart nexus-staging (port 8766)
```
## Value added in this PR
- adds `tests/test_deploy_script_ports.py` so future drift between `deploy.sh` comments and `docker-compose.yml` is caught automatically
- documents the verification outcome here so the issue can be closed without reimplementing an already-merged fix
## Recommendation
Close issue #1413 as already implemented.

View File

@@ -129,21 +129,13 @@
"type": "harness",
"params": {
"mode": "creative"
},
"action_label": "Enter Workshop"
}
},
"agents_present": [
"timmy",
"kimi"
],
"interaction_ready": true,
"portal_type": "harness",
"world_category": "creative",
"environment": "local",
"access_mode": "open",
"readiness_state": "online",
"telemetry_source": "hermes-harness:workshop",
"owner": "Timmy"
"interaction_ready": true
},
{
"id": "archive",
@@ -165,20 +157,12 @@
"type": "harness",
"params": {
"mode": "read"
},
"action_label": "Enter Archive"
}
},
"agents_present": [
"claude"
],
"interaction_ready": true,
"portal_type": "harness",
"world_category": "knowledge",
"environment": "local",
"access_mode": "open",
"readiness_state": "online",
"telemetry_source": "hermes-harness:archive",
"owner": "Timmy"
"interaction_ready": true
},
{
"id": "chapel",
@@ -200,18 +184,10 @@
"type": "harness",
"params": {
"mode": "meditation"
},
"action_label": "Enter Chapel"
}
},
"agents_present": [],
"interaction_ready": true,
"portal_type": "harness",
"world_category": "spiritual",
"environment": "local",
"access_mode": "open",
"readiness_state": "online",
"telemetry_source": "hermes-harness:chapel",
"owner": "Timmy"
"interaction_ready": true
},
{
"id": "courtyard",
@@ -233,21 +209,13 @@
"type": "harness",
"params": {
"mode": "social"
},
"action_label": "Enter Courtyard"
}
},
"agents_present": [
"timmy",
"perplexity"
],
"interaction_ready": true,
"portal_type": "harness",
"world_category": "social",
"environment": "local",
"access_mode": "open",
"readiness_state": "online",
"telemetry_source": "hermes-harness:courtyard",
"owner": "Timmy"
"interaction_ready": true
},
{
"id": "gate",
@@ -269,17 +237,59 @@
"type": "harness",
"params": {
"mode": "transit"
},
"action_label": "Enter Gate"
}
},
"agents_present": [],
"interaction_ready": false,
"portal_type": "harness",
"world_category": "meta",
"environment": "local",
"access_mode": "open",
"interaction_ready": false
},
{
"id": "playground",
"name": "Sound Playground",
"description": "Interactive audio-visual experience. Paint with sound, create music visually.",
"status": "online",
"color": "#ff00ff",
"role": "creative",
"position": {
"x": 10,
"y": 0,
"z": 15
},
"rotation": {
"y": -0.7
},
"portal_type": "creative-tool",
"world_category": "audio-visual",
"environment": "production",
"access_mode": "visitor",
"readiness_state": "online",
"telemetry_source": "hermes-harness:gate",
"owner": "Timmy"
"readiness_steps": {
"prototype": {
"label": "Prototype",
"done": true
},
"runtime_ready": {
"label": "Runtime Ready",
"done": true
},
"launched": {
"label": "Launched",
"done": true
},
"harness_bridged": {
"label": "Harness Bridged",
"done": true
}
},
"blocked_reason": null,
"telemetry_source": "playground",
"owner": "Timmy",
"destination": {
"url": "./playground/playground.html",
"type": "local",
"action_label": "Enter Playground",
"params": {}
},
"agents_present": [],
"interaction_ready": true
}
]

View File

@@ -7,7 +7,6 @@ the body (Evennia/Morrowind), and the visualization surface.
import asyncio
import json
import logging
import os
import signal
import sys
from typing import Set
@@ -18,8 +17,6 @@ import websockets
# Configuration
PORT = 8765
HOST = "0.0.0.0" # Allow external connections if needed
PORTALS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "portals.json")
PORTALS_POLL_INTERVAL = 2.0 # seconds
# Logging setup
logging.basicConfig(
@@ -82,39 +79,6 @@ async def broadcast_handler(websocket: websockets.WebSocketServerProtocol):
clients.discard(websocket)
logger.info(f"Client disconnected {addr}. Total clients: {len(clients)}")
async def watch_portals(stop_event: asyncio.Future):
    """Poll portals.json for changes and broadcast reload to all clients.

    Runs until ``stop_event`` resolves, checking the file's mtime every
    ``PORTALS_POLL_INTERVAL`` seconds. When the mtime changes, sends a
    ``portals_reload`` JSON message to every connected client and drops
    clients whose send fails.
    """
    last_mtime = 0.0
    try:
        last_mtime = os.path.getmtime(PORTALS_FILE)
    except OSError:
        # Missing file is tolerated; the loop below keeps retrying, so the
        # watcher also detects the file being created later.
        logger.warning(f"portals.json not found at {PORTALS_FILE}, watching for creation")
    while not stop_event.done():
        await asyncio.sleep(PORTALS_POLL_INTERVAL)
        # Re-check after sleeping so shutdown isn't delayed by a broadcast.
        if stop_event.done():
            break
        try:
            current_mtime = os.path.getmtime(PORTALS_FILE)
        except OSError:
            # File temporarily missing (e.g. mid-rewrite) — skip this tick.
            continue
        if current_mtime != last_mtime:
            last_mtime = current_mtime
            logger.info("portals.json changed — broadcasting reload")
            msg = json.dumps({"type": "portals_reload", "timestamp": current_mtime})
            disconnected = set()
            # Snapshot the client set: handlers may discard entries while we await.
            for client in list(clients):
                # NOTE(review): relies on the websockets connection's `.open`
                # attribute to skip already-closed sockets — confirm against the
                # installed websockets version.
                if client.open:
                    try:
                        await client.send(msg)
                    except Exception:
                        disconnected.add(client)
            if disconnected:
                clients.difference_update(disconnected)
                logger.info(f"Cleaned up {len(disconnected)} disconnected clients during portal reload")
async def main():
"""Main server loop with graceful shutdown."""
logger.info(f"Starting Nexus WS gateway on ws://{HOST}:{PORT}")
@@ -136,13 +100,7 @@ async def main():
async with websockets.serve(broadcast_handler, HOST, PORT):
logger.info("Gateway is ready and listening.")
watcher_task = asyncio.create_task(watch_portals(stop))
await stop
watcher_task.cancel()
try:
await watcher_task
except asyncio.CancelledError:
pass
logger.info("Shutting down Nexus WS gateway...")
# Close any remaining client connections (handlers may have already cleaned up)

View File

@@ -0,0 +1,25 @@
from pathlib import Path

# Repository root for the nexus project; this test file lives one directory
# below it (tests/), so two .parent hops reach the root.
NEXUS_ROOT = Path(__file__).resolve().parent.parent
def test_deploy_sh_header_comments_match_live_ports():
    """deploy.sh header comments must cite the live ports and drop the stale ones."""
    deploy_sh = (NEXUS_ROOT / "deploy.sh").read_text()
    # The corrected comments cite 8765 (main) and 8766 (staging).
    assert "(port 8765)" in deploy_sh, "deploy.sh should document nexus-main on port 8765"
    assert "(port 8766)" in deploy_sh, "deploy.sh should document nexus-staging on port 8766"
    # The pre-fix comments referenced 4200/4201; neither may survive.
    assert "4200" not in deploy_sh, "stale 4200 comment should not remain in deploy.sh"
    assert "4201" not in deploy_sh, "stale 4201 comment should not remain in deploy.sh"
def test_deploy_sh_comments_match_docker_compose_bindings():
    """Port numbers in deploy.sh comments must agree with docker-compose.yml."""
    script_lines = (NEXUS_ROOT / "deploy.sh").read_text().splitlines()
    compose = (NEXUS_ROOT / "docker-compose.yml").read_text()

    def port_comment_for(service):
        # First deploy.sh line that mentions both the service and the word "port".
        return next(line for line in script_lines if service in line and "port" in line)

    main_comment = port_comment_for("nexus-main")
    staging_comment = port_comment_for("nexus-staging")
    assert '"8765:8765"' in compose, "docker-compose should expose nexus-main on 8765"
    assert '"8766:8765"' in compose, "docker-compose should expose nexus-staging via host port 8766"
    assert "8765" in main_comment, "nexus-main deploy comment should cite 8765"
    assert "8766" in staging_comment, "nexus-staging deploy comment should cite 8766"