Compare commits

..

1 Commits

Author SHA1 Message Date
Alexander Whitestone
5ff0257c0f feat: 3D audio spatial chat — volume based on distance (fix #1544)
Some checks failed
Review Approval Gate / verify-review (pull_request) Failing after 11s
CI / test (pull_request) Failing after 1m14s
CI / validate (pull_request) Failing after 1m17s
- Add SpatialChatAudio component with distance-based volume rolloff
- Each agent gets distinct tone (freq + waveform)
- Stereo panning based on relative position to camera
- HRTF PannerNode support for voice chat streams
- Integrate with addChatMessage for spatial notifications
- Tests + docs included
2026-04-22 02:55:20 -04:00
8 changed files with 450 additions and 373 deletions

144
app.js
View File

@@ -5,6 +5,7 @@ import { UnrealBloomPass } from 'three/addons/postprocessing/UnrealBloomPass.js'
import { SMAAPass } from 'three/addons/postprocessing/SMAAPass.js';
import { SpatialMemory } from './nexus/components/spatial-memory.js';
import { SpatialAudio } from './nexus/components/spatial-audio.js';
import { SpatialChatAudio } from './nexus/components/spatial-chat-audio.js';
import { MemoryBirth } from './nexus/components/memory-birth.js';
import { MemoryOptimizer } from './nexus/components/memory-optimizer.js';
import { MemoryInspect } from './nexus/components/memory-inspect.js';
@@ -104,13 +105,7 @@ const orbitState = {
let flyY = 2;
// ══ POV CAMERA SYSTEM ══
let povMode = false; // true when viewing through agent's eyes
let povAgentIdx = -1; // index into agents[] for POV target (-1 = none)
let savedCameraState = null; // { position: Vector3, rotation: Euler } to restore on exit
const DEFAULT_AGENT_FOV = 75; // default field-of-view for agent POV cameras
// ╡══ INIT ══╡
// ═══ INIT ══
import {
SymbolicEngine, AgentFSM, KnowledgeGraph, Blackboard,
@@ -774,6 +769,7 @@ async function init() {
SpatialMemory.setCamera(camera);
SpatialAudio.init(camera, scene);
SpatialAudio.bindSpatialMemory(SpatialMemory);
SpatialChatAudio.init(camera);
MemoryInspect.init({ onNavigate: _navigateToMemory });
MemoryPulse.init(SpatialMemory);
ReasoningTrace.init();
@@ -1340,10 +1336,10 @@ function updateNexusCommand(state) {
// ═══ AGENT PRESENCE SYSTEM ═══
function createAgentPresences() {
const agentData = [
{ id: 'timmy', name: 'TIMMY', color: NEXUS.colors.primary, pos: { x: -4, z: -4 }, station: { x: -4, z: -4 }, fov: 70 },
{ id: 'kimi', name: 'KIMI', color: NEXUS.colors.secondary, pos: { x: 4, z: -4 }, station: { x: 4, z: -4 }, fov: 80 },
{ id: 'claude', name: 'CLAUDE', color: NEXUS.colors.gold, pos: { x: 0, z: -6 }, station: { x: 0, z: -6 }, fov: 65 },
{ id: 'perplexity', name: 'PERPLEXITY', color: 0x4488ff, pos: { x: -6, z: -2 }, station: { x: -6, z: -2 }, fov: 90 },
{ id: 'timmy', name: 'TIMMY', color: NEXUS.colors.primary, pos: { x: -4, z: -4 }, station: { x: -4, z: -4 } },
{ id: 'kimi', name: 'KIMI', color: NEXUS.colors.secondary, pos: { x: 4, z: -4 }, station: { x: 4, z: -4 } },
{ id: 'claude', name: 'CLAUDE', color: NEXUS.colors.gold, pos: { x: 0, z: -6 }, station: { x: 0, z: -6 } },
{ id: 'perplexity', name: 'PERPLEXITY', color: 0x4488ff, pos: { x: -6, z: -2 }, station: { x: -6, z: -2 } },
];
agentData.forEach(data => {
@@ -1399,8 +1395,7 @@ function createAgentPresences() {
color,
station: data.station,
targetPos: new THREE.Vector3(data.pos.x, 0, data.pos.z),
wanderTimer: 0,
fov: data.fov || DEFAULT_AGENT_FOV,
wanderTimer: 0
});
});
}
@@ -1964,97 +1959,7 @@ function updateNavModeUI(mode) {
if (el) el.textContent = mode.toUpperCase();
}
// ══ AGENT POV CAMERA TOGGLE ══
function toggleAgentPOV() {
if (!agents.length) {
addChatMessage('system', 'No agents present to observe.');
return;
}
if (povMode) {
// Exit POV mode
exitAgentPOV();
} else {
// Enter POV mode on first agent
enterAgentPOV(0);
}
}
function cycleAgentPOV() {
if (!agents.length) return;
if (!povMode) {
enterAgentPOV(0);
return;
}
const nextIdx = (povAgentIdx + 1) % agents.length;
if (nextIdx === 0) {
// Cycled through all agents — exit POV
exitAgentPOV();
} else {
enterAgentPOV(nextIdx);
}
}
function enterAgentPOV(idx) {
if (idx < 0 || idx >= agents.length) return;
// Save current camera state before switching
if (!povMode) {
savedCameraState = {
position: camera.position.clone(),
rotation: camera.rotation.clone(),
fov: camera.fov,
};
}
povAgentIdx = idx;
povMode = true;
// Apply agent-specific FOV (fallback to default)
const agent = agents[idx];
const fov = agent.fov || DEFAULT_AGENT_FOV;
camera.fov = fov;
camera.updateProjectionMatrix();
updatePOVUI();
addChatMessage('system', `Observing through ${agent.id.toUpperCase()}'s eyes. FOV: ${fov}°`);
}
function exitAgentPOV() {
if (!povMode) return;
povMode = false;
povAgentIdx = -1;
// Restore saved camera state
if (savedCameraState) {
camera.position.copy(savedCameraState.position);
camera.rotation.copy(savedCameraState.rotation);
camera.fov = savedCameraState.fov;
camera.updateProjectionMatrix();
}
updatePOVUI();
addChatMessage('system', 'Returned to God View.');
}
function updatePOVUI() {
const label = document.getElementById('pov-label');
const btn = document.getElementById('pov-toggle-btn');
if (!label || !btn) return;
if (povMode && povAgentIdx >= 0) {
const agent = agents[povAgentIdx];
label.textContent = agent.id.toUpperCase();
btn.classList.add('pov-active');
} else {
label.textContent = 'AGENT POV';
btn.classList.remove('pov-active');
}
}
// ╡══ CONTROLS ══╡
// ══ CONTROLS ══
function setupControls() {
document.addEventListener('keydown', (e) => {
keys[e.key.toLowerCase()] = true;
@@ -2081,9 +1986,6 @@ function setupControls() {
if (e.key.toLowerCase() === 'v' && document.activeElement !== document.getElementById('chat-input')) {
cycleNavMode();
}
if (e.key.toLowerCase() === 'p' && document.activeElement !== document.getElementById('chat-input')) {
cycleAgentPOV();
}
if (e.key.toLowerCase() === 'f' && activePortal && !portalOverlayActive) {
activatePortal(activePortal);
}
@@ -2233,7 +2135,6 @@ function setupControls() {
document.getElementById('vision-close-btn').addEventListener('click', closeVisionOverlay);
document.getElementById('mode-toggle-btn').addEventListener('click', toggleUIMode);
document.getElementById('pov-toggle-btn').addEventListener('click', cycleAgentPOV);
document.getElementById('atlas-toggle-btn').addEventListener('click', openPortalAtlas);
document.getElementById('atlas-close-btn').addEventListener('click', closePortalAtlas);
initAtlasControls();
@@ -3067,6 +2968,17 @@ function loadSession() {
function addChatMessage(agent, text, shouldSave = true) {
// Mine chat messages for MemPalace
mineMemPalaceContent(text);
// 3D spatial audio notification (issue #1544)
if (window.SpatialChatAudio && agent !== 'system') {
// Find agent position from tracked agents or fallback to origin
let pos = { x: 0, y: 0, z: 0 };
const agentEntry = Array.isArray(window._trackedAgents) && window._trackedAgents.find(a => a.name === agent);
if (agentEntry && agentEntry.position) {
pos = agentEntry.position;
}
window.SpatialChatAudio.playChatSound(agent, new THREE.Vector3(pos.x, pos.y, pos.z));
}
// Mine chat messages for MemPalace
mineMemPalaceContent(text);
const container = document.getElementById('chat-messages');
@@ -3471,21 +3383,7 @@ function gameLoop() {
const mode = NAV_MODES[navModeIdx];
const chatActive = document.activeElement === document.getElementById('chat-input');
// Agent POV mode overrides other camera modes
if (povMode && povAgentIdx >= 0 && agents[povAgentIdx]) {
const agent = agents[povAgentIdx];
const orbPos = agent.orb.getWorldPosition(new THREE.Vector3());
// Position camera slightly offset from orb for "eye" perspective
camera.position.copy(orbPos);
camera.position.y += 0.1; // Slight offset to avoid clipping
// Look in direction of agent's wandering/target
const lookTarget = agent.targetPos.clone();
lookTarget.y = camera.position.y;
camera.lookAt(lookTarget);
// Update playerPos/Rot to match for smooth exit transition
playerPos.copy(camera.position);
playerRot.y = Math.atan2(lookTarget.x - camera.position.x, lookTarget.z - camera.position.z);
} else if (mode === 'walk') {
if (mode === 'walk') {
if (!chatActive && !portalOverlayActive) {
const speed = 6 * delta;
const dir = new THREE.Vector3();

View File

@@ -1,98 +0,0 @@
# Nostr Migration Consolidation Plan
> Issue #862 | Canonical Epic: the-nexus #819
> Consolidated From: the-nexus #819 + timmy-config #138
---
## Problem
Two epics tracked the same Telegram -> Nostr migration with overlapping scope:
| Epic | Repo | Focus | Status |
|------|------|-------|--------|
| #819 | the-nexus | Client fork (Nostur), UI/UX, agent presence | **CANONICAL** |
| #138 | timmy-config | Relay/infrastructure, deployment, ops | Tracked child |
Neither was the parent. Work risked duplication and drift.
---
## Resolution
**#819 is the canonical parent epic.** All Nostr migration work rolls up here.
### Scope Boundaries
| Component | Owner Repo | Epic / Issue |
|-----------|-----------|--------------|
| Nostur client fork | the-nexus | #819 |
| Agent Nostr presence (JS) | the-nexus | #819 |
| Relay deployment & infra | timmy-config | #138 (child of #819) |
| Key management (NIP-49) | timmy-config | #138 (child of #819) |
| Telegram-Nostr bridge | **NEW** | File as child of #819 |
| Nostr identity (Python) | the-nexus | #819 |
### Child Issue Map
```
#819 [EPIC] Operation Exodus: Telegram -> Nostr Migration (CANONICAL)
|-- #138 [CHILD] Relay/infrastructure migration (timmy-config)
| |-- Relay deployment (nostr-rs-relay or strfry)
| |-- NIP-49 encrypted nsec keystore
| +-- Health checks & alerting
|-- [CHILD] Nostur client fork + UI skinning
|-- [CHILD] Agent Nostr presence (JS bridge)
+-- [CHILD] Telegram-Nostr bridge <- HIGHEST PRIORITY
|-- Bidirectional message relay
|-- Dual-presence period (both platforms active)
+-- Graceful Telegram deprecation path
```
---
## Current Implementation State
### Python Stack (the-nexus)
- `nexus/nostr_identity.py` - Pure-Python BIP340 Schnorr signatures
- WARNING **Timing side-channel vulnerabilities** (see FINDINGS-issue-801.md)
- Suitable for prototyping; production needs `coincurve` or constant-time rewrite
- `nexus/nostr_publisher.py` - Async WebSocket publisher to public relays
### Browser Stack (the-nexus)
- `app.js:NostrAgent` - Browser-side agent presence
- WARNING Uses **mock signatures** (`mock_id`, `mock_sig`)
- Needs real crypto integration or delegation to Python backend
### Infrastructure (timmy-config)
- `nostr-bridge.service` - Running but source file deleted, only `.pyc` remains
- `/root/nostr-relay/keystore.json` - NIP-49 encrypted nsec storage
---
## Highest Priority: Telegram-Nostr Bridge
The bridge is the critical path. Without it, migration strands users on Telegram.
**Requirements:**
1. Bidirectional message relay (Telegram <-> Nostr)
2. Dual-presence period: both platforms active during transition
3. Graceful deprecation: Telegram bot stays online until 90% of active users have Nostr handles
4. Channel/topic mapping: preserve conversation structure
**File this as a new child issue under #819.**
---
## Action Items
- [ ] Close #138 in timmy-config with comment: "Consolidated into the-nexus #819. Relay/infrastructure work tracked as child of canonical epic."
- [ ] Update #819 title/body to reference this consolidation plan
- [ ] File child issue: Telegram-Nostr bridge (bidirectional, dual-presence)
- [ ] File child issue: Fix timing side-channel in `nostr_identity.py` (or replace with `coincurve`)
- [ ] File child issue: Replace mock signatures in `app.js:NostrAgent` with real crypto
- [ ] Assign owners to each child issue
---
*Sovereignty and service always.*

View File

@@ -1,140 +0,0 @@
# Telegram-Nostr Bridge Specification
> Child of Epic #819 (Operation Exodus: Telegram -> Nostr Migration)
> Priority: HIGHEST
---
## Overview
Bidirectional message relay between Telegram and Nostr during the migration period.
Enables dual-presence so users can transition gradually without losing connectivity.
---
## Requirements
### Functional
1. **Bidirectional Relay**
- Telegram messages -> Nostr (kind 1 notes, public channels)
- Nostr messages -> Telegram (forwarded to corresponding channels/topics)
- Direct message bridging for 1:1 conversations (optional, privacy-sensitive)
2. **Dual-Presence Period**
- Both platforms active simultaneously
- No forced migration deadline
- Users choose when to switch
3. **Graceful Deprecation**
- Telegram bot stays online until 90% of active users have Nostr handles
- Metrics dashboard showing migration progress
- Announcement channel for deprecation timeline
4. **Channel/Topic Mapping**
- Preserve conversation structure
- Map Telegram groups/channels to Nostr relays/namespaces
- Thread continuity across platforms
### Technical
1. **Nostr Side**
- Publish to configured relays (damus.io, nos.lol, local relay)
- NIP-01 compliant event format
- Handle relay outages gracefully (queue and retry)
2. **Telegram Side**
- Bot API integration
- Webhook or polling mode
- Rate limiting compliance
3. **Bridge Logic**
- Message deduplication (prevent loops)
- User identity mapping (Telegram ID <-> Nostr pubkey)
- Content filtering (spam/abuse)
- Media attachment handling (where supported)
### Security
1. **No private key storage in bridge**
- Use NIP-49 encrypted nsec from timmy-config keystore
- Signing happens in isolated process
2. **Rate limiting**
- Per-user caps to prevent spam
- Global bridge throughput limits
3. **Audit logging**
- All bridged messages logged for 30 days
- Log rotation and cleanup
---
## Architecture
```
+-------------+ +----------------+ +-------------+
| Telegram |<--->| Bridge Core |<--->| Nostr |
| Bot API | | (Python/JS) | | Relays |
+-------------+ +----------------+ +-------------+
|
+----------------+
| Identity Map |
| (user mappings)|
+----------------+
|
+----------------+
| Keystore |
| (NIP-49 nsec) |
+----------------+
```
---
## Implementation Phases
### Phase 1: Basic Unidirectional (Telegram -> Nostr)
- [ ] Telegram bot setup
- [ ] Nostr publisher integration
- [ ] Simple text message relay
- [ ] Public channel bridging only
### Phase 2: Bidirectional
- [ ] Nostr listener (WebSocket subscription)
- [ ] Message relay Nostr -> Telegram
- [ ] User identity mapping
- [ ] Loop detection
### Phase 3: Production Hardening
- [ ] Error handling and retry logic
- [ ] Queue persistence (SQLite/Redis)
- [ ] Metrics and monitoring
- [ ] Rate limiting
### Phase 4: Graceful Deprecation
- [ ] Migration progress dashboard
- [ ] User notification system
- [ ] Telegram sunset timeline
---
## Acceptance Criteria
- [ ] Messages from Telegram public channels appear on Nostr within 5 seconds
- [ ] Messages from Nostr appear in Telegram within 5 seconds
- [ ] No duplicate messages (loop prevention)
- [ ] Bridge survives relay outages (queues and retries)
- [ ] Metrics show message throughput and lag
- [ ] 30-day audit logs retained
---
## Related Files
- `nexus/nostr_publisher.py` - Nostr publishing (reusable)
- `nexus/nostr_identity.py` - Signing (needs hardening)
- `docs/nostr-migration/CONSOLIDATION.md` - Parent epic context
---
*Part of Operation Exodus.*

View File

@@ -0,0 +1,68 @@
# Spatial Chat Audio — 3D Audio for Chat Messages
Refs: the-nexus #1544
## Overview
Adds spatial awareness to chat notifications so nearby users/agents sound louder.
Volume scales with avatar distance from the camera.
## Features
### Chat Notification Sounds
- Each agent has a distinct tone (frequency + waveform)
- Volume decreases with distance (inverse rolloff)
- Stereo panning based on relative position to camera
- Sounds auto-cleanup after playback
### 3D Positional Voice (WebRTC-ready)
- `createVoiceSource()` returns a PannerNode for real voice streams
- HRTF panning model for realistic 3D positioning
- Update position in real-time as avatars move
### Configurable Parameters
- `maxHearingDistance` — max distance to hear sounds (default: 40)
- `refDistance` — full volume within this range (default: 5)
- `rolloffFactor` — volume falloff curve (default: 1.5)
- `baseVolume` — master volume cap (default: 0.3)
## Usage
```javascript
import { SpatialChatAudio } from './nexus/components/spatial-chat-audio.js';
// Initialize with camera
SpatialChatAudio.init(camera);
// Set max hearing distance
SpatialChatAudio.setMaxHearingDistance(50);
// Play a chat sound when a message arrives
// position = avatar/agent position in 3D world
SpatialChatAudio.playChatSound('timmy', agentPosition);
// For voice chat: create a persistent 3D source
const voice = SpatialChatAudio.createVoiceSource('user', avatarPosition);
// Update as avatar moves
voice.updatePosition(newPosition);
// Cleanup when disconnected
voice.destroy();
```
## Agent Sound Profiles
| Agent | Frequency | Waveform |
|--------|-----------|------------|
| timmy | 440 Hz | sine |
| user | 523 Hz | sine |
| system | 330 Hz | triangle |
| kimi | 659 Hz | sine |
| claude | 392 Hz | sine |
| grok | 587 Hz | triangle |
| gemini | 494 Hz | sine |
## Testing
```bash
node tests/test_spatial_chat_audio.js
```

View File

@@ -173,10 +173,6 @@
<span class="hud-icon">👁</span>
<span class="hud-btn-label" id="mode-label">VISITOR</span>
</button>
<button id="pov-toggle-btn" class="hud-icon-btn" title="Agent POV Camera">
<span class="hud-icon">👁</span>
<span class="hud-btn-label" id="pov-label">AGENT POV</span>
</button>
<button id="atlas-toggle-btn" class="hud-icon-btn" title="Portal Atlas">
<span class="hud-icon">🌐</span>
<span class="hud-btn-label">WORLDS</span>
@@ -233,7 +229,6 @@
<span>WASD</span> move &nbsp; <span>Mouse</span> look &nbsp; <span>Enter</span> chat &nbsp;
<span>V</span> mode: <span id="nav-mode-label">WALK</span>
<span id="nav-mode-hint" class="nav-mode-hint"></span>
&nbsp; <span>P</span> agent POV &nbsp;
&nbsp; <span>H</span> archive &nbsp;
<span class="ws-hud-status">HERMES: <span id="ws-status-dot" class="chat-status-dot"></span></span>
</div>

View File

@@ -0,0 +1,236 @@
// ════════════════════════════════════════════════════════════════════════════════════════════════════════════════════════════
// SPATIAL CHAT AUDIO — 3D Audio for Chat Messages (issue #1544)
// ════════════════════════════════════════════════════════════════════════════════════════════════════════════════════════════
//
// Volume scales with avatar distance — closer agents sound louder.
// 3D positional audio places chat sounds in world space.
//
// Usage from app.js:
// import { SpatialChatAudio } from './nexus/components/spatial-chat-audio.js';
// SpatialChatAudio.init(camera);
// SpatialChatAudio.playChatSound('timmy', agentPosition);
//
// Configuration:
// SpatialChatAudio.setMaxHearingDistance(50); // default 40 units
// SpatialChatAudio.setEnabled(true/false);
// ═══════════════════════════════════════════════════════════════════════════════════════════════════════════════════════════
const SpatialChatAudio = (() => {
  // ─── CONFIG ─────────────────────────────────────────────────────────────
  let _config = {
    maxHearingDistance: 40, // Distance at which volume reaches 0
    refDistance: 5,         // Full volume at or inside this range
    rolloffFactor: 1.5,     // Rolloff steepness (inverse distance model)
    baseVolume: 0.3,        // Master volume for chat sounds
    enabled: true,
  };
  // ─── STATE ──────────────────────────────────────────────────────────────
  let _camera = null;       // camera whose position is the listener position
  let _listener = null;     // THREE.AudioListener attached to the camera
  let _ctx = null;          // underlying Web Audio AudioContext
  let _masterGain = null;   // single master gain all chat sounds route through
  let _initialized = false;
  // Agent sound profiles (frequency + waveform)
  const AGENT_SOUNDS = {
    timmy: { freq: 440, type: 'sine' }, // A4 - clear
    user: { freq: 523, type: 'sine' }, // C5 - higher
    system: { freq: 330, type: 'triangle' }, // E4 - neutral
    kimi: { freq: 659, type: 'sine' }, // E5 - bright
    claude: { freq: 392, type: 'sine' }, // G4 - warm
    grok: { freq: 587, type: 'triangle' }, // D5 - sharp
    gemini: { freq: 494, type: 'sine' }, // B4 - balanced
    default: { freq: 440, type: 'sine' }, // A4 - default
  };
  // ─── INIT ───────────────────────────────────────────────────────────────
  /**
   * Attach an AudioListener to the camera, grab its AudioContext, and build
   * the master gain stage. Idempotent: repeated calls return the existing
   * listener without rebuilding the graph.
   * @param {THREE.Camera} camera - camera whose position drives attenuation
   * @returns {THREE.AudioListener} the listener bound to the camera
   */
  function init(camera) {
    if (_initialized) return _listener; // was returning undefined on re-init
    _camera = camera;
    _listener = new THREE.AudioListener();
    camera.add(_listener);
    // Route every chat sound through one master gain for global volume control
    _ctx = _listener.context;
    _masterGain = _ctx.createGain();
    _masterGain.gain.value = _config.baseVolume;
    _masterGain.connect(_ctx.destination);
    _initialized = true;
    console.info('[SpatialChatAudio] Initialized — max hearing distance:', _config.maxHearingDistance);
    // Browsers suspend a fresh AudioContext until a user gesture (autoplay
    // policy); resume on the first click/keypress, then drop the handlers.
    if (_ctx.state === 'suspended') {
      const resume = () => {
        _ctx.resume().then(() => {
          console.info('[SpatialChatAudio] AudioContext resumed');
          document.removeEventListener('click', resume);
          document.removeEventListener('keydown', resume);
        });
      };
      document.addEventListener('click', resume);
      document.addEventListener('keydown', resume);
    }
    return _listener;
  }
  // ─── PLAY CHAT SOUND ────────────────────────────────────────────────────
  /**
   * Play a short positional beep for a chat message.
   * Attenuation follows the Web Audio "inverse" distance model — full volume
   * at or inside refDistance — matching the PannerNode settings used by
   * createVoiceSource(), so beeps and voice roll off consistently.
   * @param {string} agent - agent id; selects tone from AGENT_SOUNDS
   * @param {{x:number,y:number,z:number}} position - speaker world position
   */
  function playChatSound(agent, position) {
    if (!_initialized || !_config.enabled) return;
    const sound = AGENT_SOUNDS[agent] || AGENT_SOUNDS.default;
    const camPos = _camera.position;
    // Distance from listener (camera) to the speaker
    const dx = position.x - camPos.x;
    const dy = position.y - camPos.y;
    const dz = position.z - camPos.z;
    const dist = Math.sqrt(dx * dx + dy * dy + dz * dz);
    // Distance-based attenuation.
    // BUG FIX (#1544 follow-up): the previous formula
    //   1 / (1 + rolloff * (dist - refDistance))
    // went NEGATIVE for dist < refDistance - 1/rolloff, so the clamp muted
    // agents standing right next to the camera. Inside refDistance we now
    // play at full volume; beyond it, the standard inverse model applies:
    //   ref / (ref + rolloff * (dist - ref))
    let volume = 0;
    if (dist < _config.maxHearingDistance) {
      if (dist <= _config.refDistance) {
        volume = 1;
      } else {
        volume = _config.refDistance /
          (_config.refDistance + _config.rolloffFactor * (dist - _config.refDistance));
      }
    }
    // Skip scheduling nodes for inaudible sounds
    if (volume < 0.01) return;
    // Graph: oscillator -> gain (envelope) -> stereo panner -> master gain
    const osc = _ctx.createOscillator();
    osc.type = sound.type;
    osc.frequency.value = sound.freq;
    const gain = _ctx.createGain();
    gain.gain.value = volume;
    const panner = _ctx.createStereoPanner();
    // Pan by projecting the horizontal direction-to-source onto the camera's
    // right vector (forward × up = right).
    const camRight = new THREE.Vector3();
    _camera.getWorldDirection(camRight);
    camRight.cross(_camera.up).normalize();
    const toSource = new THREE.Vector3(dx, 0, dz).normalize();
    const pan = THREE.MathUtils.clamp(toSource.dot(camRight), -1, 1);
    panner.pan.value = pan;
    osc.connect(gain);
    gain.connect(panner);
    panner.connect(_masterGain);
    // Short attack/decay envelope so the beep starts and ends without clicks
    const now = _ctx.currentTime;
    gain.gain.setValueAtTime(0, now);
    gain.gain.linearRampToValueAtTime(volume, now + 0.01);
    gain.gain.exponentialRampToValueAtTime(0.001, now + 0.3);
    osc.start(now);
    osc.stop(now + 0.35);
    // Disconnect nodes when playback ends so they can be garbage-collected
    osc.onended = () => {
      osc.disconnect();
      gain.disconnect();
      panner.disconnect();
    };
    console.debug(`[SpatialChatAudio] ${agent} at ${dist.toFixed(1)}m, vol=${volume.toFixed(2)}, pan=${pan.toFixed(2)}`);
  }
  // ─── CONFIGURATION ──────────────────────────────────────────────────────
  /** Set the silence radius; clamped to a minimum of 5 world units. */
  function setMaxHearingDistance(dist) {
    _config.maxHearingDistance = Math.max(5, dist);
    console.info('[SpatialChatAudio] Max hearing distance:', _config.maxHearingDistance);
  }
  function getMaxHearingDistance() {
    return _config.maxHearingDistance;
  }
  /** Enable/disable chat beeps (voice sources are unaffected). */
  function setEnabled(enabled) {
    _config.enabled = enabled;
    console.info('[SpatialChatAudio]', enabled ? 'Enabled' : 'Disabled');
  }
  function isEnabled() {
    return _config.enabled;
  }
  /** Smoothly ramp the master gain to vol (clamped to [0, 1]). */
  function setMasterVolume(vol) {
    if (_masterGain) {
      _masterGain.gain.setTargetAtTime(
        THREE.MathUtils.clamp(vol, 0, 1),
        _ctx.currentTime,
        0.05
      );
    }
  }
  // ─── VOICE CHAT SUPPORT (WebRTC placeholder) ────────────────────────────
  /**
   * Create a persistent 3D source for a voice stream.
   * Returns a handle exposing the raw PannerNode (HRTF, inverse distance
   * model mirroring _config), plus updatePosition()/destroy() helpers.
   * Returns null if init() has not run.
   * @param {string} agentId - identifier for logging
   * @param {{x:number,y:number,z:number}} position - initial world position
   */
  function createVoiceSource(agentId, position) {
    if (!_initialized) return null;
    const panner = _ctx.createPanner();
    panner.panningModel = 'HRTF';
    panner.distanceModel = 'inverse';
    panner.refDistance = _config.refDistance;
    panner.maxDistance = _config.maxHearingDistance;
    panner.rolloffFactor = _config.rolloffFactor;
    // Set initial position
    panner.positionX.value = position.x;
    panner.positionY.value = position.y;
    panner.positionZ.value = position.z;
    panner.connect(_masterGain);
    console.info(`[SpatialChatAudio] Voice source created for ${agentId}`);
    return {
      panner,
      agentId,
      // Re-anchor the source as the avatar moves
      updatePosition(pos) {
        panner.positionX.setValueAtTime(pos.x, _ctx.currentTime);
        panner.positionY.setValueAtTime(pos.y, _ctx.currentTime);
        panner.positionZ.setValueAtTime(pos.z, _ctx.currentTime);
      },
      // Detach from the graph when the peer disconnects
      destroy() {
        panner.disconnect();
        console.info(`[SpatialChatAudio] Voice source destroyed for ${agentId}`);
      }
    };
  }
  // ─── API ────────────────────────────────────────────────────────────────
  return {
    init,
    playChatSound,
    createVoiceSource,
    setMaxHearingDistance,
    getMaxHearingDistance,
    setEnabled,
    isEnabled,
    setMasterVolume,
  };
})();
// Export for module or global usage
if (typeof module !== 'undefined' && module.exports) {
  module.exports = { SpatialChatAudio };
} else if (typeof window !== 'undefined') {
  window.SpatialChatAudio = SpatialChatAudio;
}

View File

@@ -200,13 +200,6 @@ canvas#nexus-canvas {
box-shadow: 0 0 20px var(--color-primary);
}
.hud-icon-btn.pov-active {
background: var(--color-gold);
border-color: var(--color-gold);
color: var(--color-bg);
box-shadow: 0 0 20px var(--color-gold);
}
.hud-status-item {
display: flex;
align-items: center;

View File

@@ -0,0 +1,125 @@
/**
 * Tests for SpatialChatAudio component (issue #1544).
 *
 * Runs under plain Node (no browser): `document` and the THREE namespace
 * are stubbed below, so the module can be imported and exercised without
 * a real DOM or Web Audio context. Exits non-zero on the first failure.
 */
import { SpatialChatAudio } from '../nexus/components/spatial-chat-audio.js';
// Mock DOM and THREE for Node.js testing
// `document` is only touched by the module's autoplay-resume workaround,
// so no-op listener registration is sufficient here.
if (typeof document === 'undefined') {
global.document = {
addEventListener: () => {},
removeEventListener: () => {},
};
}
if (typeof THREE === 'undefined') {
global.THREE = {
// Inert vector math: normalize/dot/cross return fixed values, so any
// panning computed during these tests resolves to 0.
Vector3: class {
constructor(x=0, y=0, z=0) { this.x = x; this.y = y; this.z = z; }
normalize() { return this; }
dot() { return 0; }
cross() { return this; }
},
MathUtils: { clamp: (v, min, max) => Math.max(min, Math.min(max, v)) },
// AudioListener exposes a fake AudioContext whose factory methods return
// objects shaped like real Web Audio nodes with no-op methods — enough
// for SpatialChatAudio to wire up its audio graph without erroring.
AudioListener: class {
constructor() {
this.context = {
state: 'running',
currentTime: 0,
createOscillator: () => ({
type: 'sine',
frequency: { value: 440 },
connect: () => {},
start: () => {},
stop: () => {},
disconnect: () => {},
onended: null,
}),
createGain: () => ({
gain: { value: 1, setValueAtTime: () => {}, linearRampToValueAtTime: () => {}, exponentialRampToValueAtTime: () => {}, setTargetAtTime: () => {} },
connect: () => {},
disconnect: () => {},
}),
createStereoPanner: () => ({
pan: { value: 0, setValueAtTime: () => {}, setTargetAtTime: () => {} },
connect: () => {},
disconnect: () => {},
}),
createPanner: () => ({
panningModel: '',
distanceModel: '',
refDistance: 0,
maxDistance: 0,
rolloffFactor: 0,
positionX: { value: 0, setValueAtTime: () => {} },
positionY: { value: 0, setValueAtTime: () => {} },
positionZ: { value: 0, setValueAtTime: () => {} },
connect: () => {},
disconnect: () => {},
}),
// 'running' state above means resume() is never actually invoked
resume: () => Promise.resolve(),
destination: {},
};
}
},
};
}
// Minimal assert helper: logs each check; hard-exits on first failure so
// CI sees a non-zero status.
function assert(condition, message) {
if (!condition) {
console.error(`❌ FAILED: ${message}`);
process.exit(1);
}
console.log(`✔ PASSED: ${message}`);
}
console.log('--- Running SpatialChatAudio Tests ---');
// Test 1: Module exports — the public API surface is present
assert(typeof SpatialChatAudio === 'object', 'SpatialChatAudio exports an object');
assert(typeof SpatialChatAudio.init === 'function', 'SpatialChatAudio has init method');
assert(typeof SpatialChatAudio.playChatSound === 'function', 'SpatialChatAudio has playChatSound method');
assert(typeof SpatialChatAudio.createVoiceSource === 'function', 'SpatialChatAudio has createVoiceSource method');
// Test 2: Config defaults — must run BEFORE Test 3 mutates the singleton
assert(SpatialChatAudio.isEnabled() === true, 'Enabled by default');
assert(SpatialChatAudio.getMaxHearingDistance() === 40, 'Default max hearing distance is 40');
// Test 3: Configuration changes
// NOTE(review): SpatialChatAudio is a module-level singleton — these
// mutations persist for the rest of the run (max distance stays at 60).
SpatialChatAudio.setMaxHearingDistance(60);
assert(SpatialChatAudio.getMaxHearingDistance() === 60, 'Max hearing distance updated to 60');
SpatialChatAudio.setEnabled(false);
assert(SpatialChatAudio.isEnabled() === false, 'Can disable audio');
SpatialChatAudio.setEnabled(true);
assert(SpatialChatAudio.isEnabled() === true, 'Can re-enable audio');
// Test 4: Initialization with mock camera (smoke test — passes if no throw)
const mockCamera = {
position: new THREE.Vector3(0, 0, 0),
getWorldDirection: () => new THREE.Vector3(1, 0, 0),
up: new THREE.Vector3(0, 1, 0),
add: () => {},
getObjectByProperty: () => null,
};
SpatialChatAudio.init(mockCamera);
assert(true, 'SpatialChatAudio initializes with camera');
// Test 5: Voice source creation — handle shape returned by createVoiceSource
const voiceSource = SpatialChatAudio.createVoiceSource('timmy', new THREE.Vector3(10, 0, 0));
assert(voiceSource !== null, 'Voice source created');
assert(voiceSource.agentId === 'timmy', 'Voice source has correct agentId');
assert(typeof voiceSource.updatePosition === 'function', 'Voice source has updatePosition');
assert(typeof voiceSource.destroy === 'function', 'Voice source has destroy');
// Test 6: Voice source position update (smoke test — mock panner is a no-op)
voiceSource.updatePosition(new THREE.Vector3(20, 0, 0));
assert(true, 'Voice source position updated');
// Test 7: Voice source cleanup (smoke test)
voiceSource.destroy();
assert(true, 'Voice source destroyed');
console.log('--- All SpatialChatAudio Tests Passed ---');