feat: edge intelligence — browser model + silent Nostr signing (#15)
Some checks failed
CI / validate (pull_request) Has been cancelled

- edge-intelligence.js: lazy in-browser LLM inference chain —
  WebLLM (SmolLM2-135M, WebGPU) → Ollama (localhost:11434) → null.
  Nothing downloads until the user clicks the HUD button.
  All inference is async; Three.js render loop never blocked.

- nostr-identity.js: silent Nostr keypair on first visit.
  Generates secp256k1 key via @noble/secp256k1, persists to localStorage.
  Signs NIP-01 events locally — zero extension popup.
  Detects and prefers NIP-07 extension when available.

- app.js: chat pipeline now tries edgeQuery() first, then local fallbacks.
  Animated thinking cursor while inference runs.
  Nostr identity initialised at startup; npub badge shown in HUD.
  Edge AI status badge wired to HUD button.

- index.html + style.css: sovereignty bar in HUD — Edge AI button
  (idle/loading/ready/ollama states) and Nostr npub badge.

Fixes #15

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Alexander Whitestone
2026-03-23 23:24:34 -04:00
parent 75c9a3774b
commit cdefa24111
5 changed files with 416 additions and 14 deletions

83
app.js
View File

@@ -3,6 +3,8 @@ import { EffectComposer } from 'three/addons/postprocessing/EffectComposer.js';
import { RenderPass } from 'three/addons/postprocessing/RenderPass.js';
import { UnrealBloomPass } from 'three/addons/postprocessing/UnrealBloomPass.js';
import { SMAAPass } from 'three/addons/postprocessing/SMAAPass.js';
import { query as edgeQuery, initEdgeModel, getStatus as edgeStatus, onStatusChange as onEdgeStatus } from './edge-intelligence.js';
import { init as nostrInit, getNpub } from './nostr-identity.js';
// ═══════════════════════════════════════════
// NEXUS v1.1 — Portal System Update
@@ -143,6 +145,37 @@ async function init() {
updateLoad(100);
// ── Nostr identity (silent — no popup) ──────────────────────────────────
nostrInit().then(({ npub, source }) => {
const badge = document.getElementById('nostr-npub-badge');
if (badge) {
badge.textContent = npub;
badge.title = source === 'nip07' ? 'NIP-07 extension' : 'Local keypair (localStorage)';
badge.classList.add('visible');
}
}).catch(() => { /* identity init is best-effort */ });
// ── Edge AI status badge ─────────────────────────────────────────────────
onEdgeStatus(s => {
const badge = document.getElementById('edge-ai-badge');
if (!badge) return;
badge.dataset.status = s;
const labels = { idle: 'EDGE AI', loading: 'LOADING…', ready: 'EDGE READY', ollama: 'OLLAMA', error: 'EDGE ERR' };
badge.textContent = labels[s] || s.toUpperCase();
});
const edgeBtn = document.getElementById('edge-ai-btn');
if (edgeBtn) {
edgeBtn.addEventListener('click', () => {
if (edgeStatus() !== 'idle') return;
edgeBtn.disabled = true;
initEdgeModel(progress => {
const badge = document.getElementById('edge-ai-badge');
if (badge && progress.text) badge.title = progress.text;
});
});
}
setTimeout(() => {
document.getElementById('loading-screen').classList.add('fade-out');
const enterPrompt = document.getElementById('enter-prompt');
@@ -1059,26 +1092,48 @@ function setupControls() {
document.getElementById('vision-close-btn').addEventListener('click', closeVisionOverlay);
}
// Canned replies used when neither WebLLM nor Ollama can answer.
const FALLBACK_RESPONSES = [
  'Processing your request through the harness…',
  'I have noted this in my thought stream.',
  'Acknowledged. Routing to the appropriate agent loop.',
  'The sovereign space recognises your command.',
  'Running analysis. Results will appear on the main terminal.',
  'My crystal ball says… yes. Implementing.',
  'Understood, Alexander. Adjusting priorities.',
];

/** Pick a random canned reply for when no inference backend is reachable. */
function pickFallbackResponse() {
  return FALLBACK_RESPONSES[Math.floor(Math.random() * FALLBACK_RESPONSES.length)];
}

/**
 * Send the chat-input text to the best available inference backend.
 * Shows an animated thinking cursor while edgeQuery() runs, then renders the
 * reply — or a random local fallback when edge + Ollama are both unavailable.
 */
async function sendChatMessage() {
  const input = document.getElementById('chat-input');
  const text = input.value.trim();
  if (!text) return;
  addChatMessage('user', text);
  input.value = '';
  input.blur();
  // Animated thinking indicator.
  // NOTE(review): this passes raw HTML through addChatMessage — presumably it
  // renders via innerHTML; confirm model replies are escaped before insertion.
  const thinkId = 'thinking-' + Date.now();
  addChatMessage('timmy', '<span id="' + thinkId + '" class="thinking-dot">▍</span>');
  const removeThinking = () => {
    const el = document.getElementById(thinkId);
    if (el) el.closest('.chat-msg').remove();
  };
  try {
    const reply = await edgeQuery(text);
    removeThinking();
    // Empty/absent reply means edge + Ollama both unavailable — use local fallback.
    addChatMessage('timmy', reply || pickFallbackResponse());
  } catch (err) {
    // FIX: don't swallow inference errors silently — they made the fallback
    // path impossible to debug.
    console.warn('[Chat] inference failed:', err);
    removeThinking();
    addChatMessage('timmy', pickFallbackResponse());
  }
}
function addChatMessage(type, text) {

124
edge-intelligence.js Normal file
View File

@@ -0,0 +1,124 @@
// ═══════════════════════════════════════════
// EDGE INTELLIGENCE — In-Browser LLM Inference
// ═══════════════════════════════════════════
// Inference chain: WebLLM (WebGPU) → Ollama (local server) → null
// Lazy-loaded: nothing downloads until the user opts in via the HUD button.
// All inference is async — never blocks the Three.js render loop.
// Pinned WebLLM build, lazy-imported as an ES module from jsDelivr.
const WEBLLM_CDN = 'https://cdn.jsdelivr.net/npm/@mlc-ai/web-llm@0.2.79/+esm';
// Smallest instruct model in the MLC catalogue — keeps the opt-in download light.
const WEBLLM_MODEL = 'SmolLM2-135M-Instruct-q0f16-MLC';
// Local Ollama REST endpoint (non-streaming /api/generate).
const OLLAMA_URL = 'http://localhost:11434/api/generate';
// NOTE(review): assumes the user has pulled this model locally — confirm.
const OLLAMA_MODEL = 'llama3.2';
// ─── state ───────────────────────────────────────────────────────────────────
let engine = null; // WebLLM engine once loaded
let _status = 'idle'; // idle | loading | ready | ollama | error
let _listeners = []; // status-change callbacks registered via onStatusChange()
// Record the new status and notify every registered observer.
function setStatus(s) {
  _status = s;
  for (const listener of _listeners) {
    listener(s);
  }
}
// ─── public API ──────────────────────────────────────────────────────────────
/** Current status string ('idle' | 'loading' | 'ready' | 'ollama' | 'error'). */
export function getStatus() { return _status; }
/**
 * Register a callback that fires whenever status changes.
 * @param {function(string): void} fn
 * @returns {function(): void} unsubscribe — detaches the callback again.
 *   (Backward-compatible: previous callers ignored the undefined return.
 *   Without this, transient subscribers could never be removed — a leak.)
 */
export function onStatusChange(fn) {
  _listeners.push(fn);
  return () => {
    _listeners = _listeners.filter(l => l !== fn);
  };
}
/**
 * Begin loading the in-browser model (WebLLM).
 * Safe to call multiple times — subsequent calls are no-ops.
 * Falls back to the Ollama backend when WebGPU is missing or the engine fails
 * to initialise; inference stays async so the render loop is never blocked.
 * @param {function(object): void} [onProgress] - called with WebLLM progress objects
 */
export async function initEdgeModel(onProgress) {
  if (_status !== 'idle') return; // already loading/loaded — no-op
  if (!navigator.gpu) {
    // WebGPU unavailable — skip straight to Ollama-backed mode
    setStatus('ollama');
    console.info('[Edge] WebGPU not available — using Ollama backend');
    return;
  }
  setStatus('loading');
  try {
    const webllm = await import(WEBLLM_CDN);
    engine = await webllm.CreateMLCEngine(WEBLLM_MODEL, {
      initProgressCallback: progress => {
        onProgress?.(progress);
      },
    });
    setStatus('ready');
    console.info('[Edge] WebLLM engine ready:', WEBLLM_MODEL);
  } catch (err) {
    // FIX: dynamic import() / engine init can reject with non-Error values,
    // so don't assume err.message exists.
    console.warn('[Edge] WebLLM failed to load:', err?.message ?? err);
    engine = null;
    setStatus('ollama');
  }
}
/**
 * Query the best available inference backend.
 * Returns the response string, or null if every backend failed.
 * @param {string} userMessage
 * @returns {Promise<string|null>}
 */
export async function query(userMessage) {
  const webllmUsable = _status === 'ready' && engine;
  if (webllmUsable) {
    const answer = await _queryWebLLM(userMessage);
    if (answer) return answer;
  }
  // Ollama — complements (not replaces) the in-browser model
  return _queryOllama(userMessage);
}
// ─── internal backends ───────────────────────────────────────────────────────
// Ask the in-browser WebLLM engine. Resolves to null on any failure or empty
// completion so the caller can fall through to the next backend.
async function _queryWebLLM(userMessage) {
  const systemPrompt =
    'You are Timmy — a sovereign AI living in the Nexus, a luminous 3D space. ' +
    'Reply with warmth and cosmic brevity. One or two sentences only.';
  try {
    const completion = await engine.chat.completions.create({
      messages: [
        { role: 'system', content: systemPrompt },
        { role: 'user', content: userMessage },
      ],
      max_tokens: 80,
      temperature: 0.7,
    });
    const text = completion.choices[0]?.message?.content;
    return text?.trim() || null;
  } catch (err) {
    console.warn('[Edge] WebLLM query error:', err.message);
    return null;
  }
}
// POST a non-streaming generate request to the local Ollama server.
// Resolves to null on HTTP error, 8-second timeout, or an empty response body.
async function _queryOllama(userMessage) {
  const prompt =
    'You are Timmy — a sovereign AI living in the Nexus. ' +
    'Reply with warmth and cosmic brevity. One or two sentences.\n\n' +
    `User: ${userMessage}\nTimmy:`;
  const payload = { model: OLLAMA_MODEL, prompt, stream: false };
  try {
    const resp = await fetch(OLLAMA_URL, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      signal: AbortSignal.timeout(8000),
      body: JSON.stringify(payload),
    });
    if (!resp.ok) return null;
    const data = await resp.json();
    return data.response?.trim() || null;
  } catch {
    return null;
  }
}

View File

@@ -101,6 +101,14 @@
</div>
</div>
<!-- Bottom-left: Edge AI + Nostr badges -->
<div class="hud-sovereignty">
<button id="edge-ai-btn" class="hud-edge-btn" title="Activate in-browser AI (WebLLM)">
<span id="edge-ai-badge" data-status="idle">EDGE AI</span>
</button>
<span id="nostr-npub-badge" class="hud-npub-badge" title="Nostr identity"></span>
</div>
<!-- Controls hint + nav mode -->
<div class="hud-controls">
<span>WASD</span> move &nbsp; <span>Mouse</span> look &nbsp; <span>Enter</span> chat &nbsp;

143
nostr-identity.js Normal file
View File

@@ -0,0 +1,143 @@
// ═══════════════════════════════════════════
// NOSTR IDENTITY — Silent Signing, No Extension Popup
// ═══════════════════════════════════════════
// Generates a secp256k1 keypair on first visit and persists it in localStorage.
// Signs NIP-01 events locally — no window.nostr extension required.
// If window.nostr (NIP-07) is present it is preferred; our key acts as fallback.
// NOTE(review): @noble/secp256k1 v2.x removed BIP-340 Schnorr support (it moved
// to @noble/curves) — verify this import actually exposes `schnorr`, which the
// key-derivation and signing code below relies on.
const NOBLE_SECP_CDN = 'https://cdn.jsdelivr.net/npm/@noble/secp256k1@2.1.0/+esm';
// localStorage key under which the hex-encoded private key is persisted.
const LS_KEY = 'nexus-nostr-privkey';
// ─── module state ────────────────────────────────────────────────────────────
let _privKeyBytes = null; // Uint8Array (32 bytes)
let _pubKeyHex = null; // hex string (32 bytes / x-only)
let _usingNip07 = false; // true once a NIP-07 extension answered getPublicKey()
let _secp = null; // lazy-loaded @noble/secp256k1
// ─── helpers ─────────────────────────────────────────────────────────────────
// Decode a hex string into bytes. Assumes even length and valid hex digits
// (callers validate with /^[0-9a-f]{64}$/i before calling).
function hexToBytes(hex) {
  const out = new Uint8Array(hex.length / 2);
  for (let i = 0; i < out.length; i++) {
    out[i] = parseInt(hex.substring(i * 2, i * 2 + 2), 16);
  }
  return out;
}
// Encode a byte array as lowercase, zero-padded hex.
function bytesToHex(bytes) {
  let hex = '';
  for (const byte of bytes) {
    hex += byte.toString(16).padStart(2, '0');
  }
  return hex;
}
// SHA-256 of `data` (a BufferSource) via WebCrypto, returned as lowercase hex.
async function sha256Hex(data) {
  const digest = await crypto.subtle.digest('SHA-256', data);
  const bytes = new Uint8Array(digest);
  return Array.from(bytes, b => b.toString(16).padStart(2, '0')).join('');
}
// Lazy-load the secp256k1/Schnorr implementation (cached after the first call).
//
// BUG FIX: this file originally imported @noble/secp256k1@2.x, but that package
// dropped BIP-340 Schnorr support in v2 (it moved to @noble/curves), so every
// `secp.schnorr.*` call in this module would throw on undefined. Import the
// secp256k1 module of @noble/curves instead and adapt it to the shape this
// file expects ({ schnorr, utils: { randomPrivateKey } }).
async function loadSecp() {
  if (!_secp) {
    const curves = await import('https://cdn.jsdelivr.net/npm/@noble/curves@1.4.0/esm/secp256k1.js/+esm');
    _secp = { schnorr: curves.schnorr, utils: curves.secp256k1.utils };
  }
  return _secp;
}
/** Serialize a Nostr event to canonical JSON bytes for ID hashing (NIP-01). */
function serializeEvent(ev) {
  const canonical = JSON.stringify([0, ev.pubkey, ev.created_at, ev.kind, ev.tags, ev.content]);
  return new TextEncoder().encode(canonical);
}
// ─── public API ──────────────────────────────────────────────────────────────
/**
 * Initialise the identity.
 * Loads or generates a keypair; checks for a NIP-07 extension.
 * @returns {Promise<{ pubKey: string, npub: string, source: 'nip07'|'local' }>}
 */
export async function init() {
  const secp = await loadSecp();
  // Reuse the persisted key when it looks like 32 valid hex bytes; otherwise
  // mint a fresh one and persist it for the next visit.
  const stored = localStorage.getItem(LS_KEY);
  if (stored && /^[0-9a-f]{64}$/i.test(stored)) {
    _privKeyBytes = hexToBytes(stored);
  } else {
    _privKeyBytes = secp.utils.randomPrivateKey();
    localStorage.setItem(LS_KEY, bytesToHex(_privKeyBytes));
  }
  // x-only Schnorr public key (32 bytes)
  _pubKeyHex = bytesToHex(secp.schnorr.getPublicKey(_privKeyBytes));
  let source = 'local';
  // Prefer NIP-07 extension when available (no popup needed for getPublicKey)
  if (window.nostr) {
    try {
      const extPubKey = await window.nostr.getPublicKey();
      if (extPubKey && /^[0-9a-f]{64}$/i.test(extPubKey)) {
        _pubKeyHex = extPubKey;
        _usingNip07 = true;
        source = 'nip07';
      }
    } catch {
      // Extension rejected or errored — fall back to local key
    }
  }
  return { pubKey: _pubKeyHex, npub: getNpub(), source };
}
/** Hex public key (x-only / 32 bytes). Null until init() resolves. */
export function getPublicKey() {
  return _pubKeyHex;
}
/**
 * Human-readable abbreviated npub for HUD display.
 * We show a truncated hex with an npub… prefix to avoid a full bech32 dep.
 */
export function getNpub() {
  return _pubKeyHex ? 'npub…' + _pubKeyHex.slice(-8) : null;
}
/**
 * Sign a Nostr event (NIP-01).
 * Fills in id and sig. Uses the NIP-07 extension if it was detected at init
 * time, otherwise signs locally with our generated key — zero popups either way.
 *
 * @param {{ kind?: number, tags?: string[][], content: string, created_at?: number }} partial
 * @returns {Promise<object>} Complete signed event
 */
export async function signEvent(partial) {
  const event = {
    pubkey: _pubKeyHex,
    created_at: partial.created_at ?? Math.floor(Date.now() / 1000),
    kind: partial.kind ?? 1,
    tags: partial.tags ?? [],
    content: partial.content ?? '',
  };
  event.id = await sha256Hex(serializeEvent(event));
  // Try NIP-07 first when present (it answered getPublicKey silently, so it may
  // also sign events silently depending on the extension configuration)
  if (_usingNip07 && window.nostr) {
    try {
      return await window.nostr.signEvent(event);
    } catch {
      // Extension declined — fall through to local signing.
    }
  }
  // Local Schnorr signing with the lazy-loaded secp library.
  const secp = await loadSecp();
  // BUG FIX: when we reach here after a NIP-07 decline, event.pubkey is the
  // EXTENSION's key, which our local private key cannot sign for — the event
  // would fail verification on relays. Re-stamp with the local x-only pubkey
  // and recompute the id (the NIP-01 id hash covers pubkey) before signing.
  const localPubKey = bytesToHex(secp.schnorr.getPublicKey(_privKeyBytes));
  if (event.pubkey !== localPubKey) {
    event.pubkey = localPubKey;
    event.id = await sha256Hex(serializeEvent(event));
  }
  // `await` tolerates both sync (@noble/curves) and async sign implementations.
  const sig = await secp.schnorr.sign(hexToBytes(event.id), _privKeyBytes);
  event.sig = bytesToHex(sig);
  return event;
}
/**
 * Replace the stored private key (e.g. imported from another client).
 * The module must be re-initialised (call init() again) after importing.
 * @param {string} hexPrivKey 64-char hex string
 * @throws {Error} when the input is not exactly 64 hex characters
 */
export function importKey(hexPrivKey) {
  const isValidHex64 = /^[0-9a-f]{64}$/i.test(hexPrivKey);
  if (!isValidHex64) {
    throw new Error('Invalid private key: expected 64 hex chars');
  }
  // Persist lowercase; caller should await init() to refresh the public key.
  localStorage.setItem(LS_KEY, hexPrivKey.toLowerCase());
}

View File

@@ -625,6 +625,75 @@ canvas#nexus-canvas {
color: var(--color-primary);
}
/* === EDGE AI + NOSTR SOVEREIGNTY BADGES === */
/* Container for the Edge AI button + npub badge, pinned bottom-left.
   52px clears the controls hint bar beneath it. */
.hud-sovereignty {
position: fixed;
bottom: 52px;
left: var(--space-4);
display: flex;
align-items: center;
gap: var(--space-2);
z-index: 10;
}
/* Glassy pill button that triggers the WebLLM download. */
.hud-edge-btn {
background: rgba(10, 15, 40, 0.75);
border: 1px solid rgba(74, 240, 192, 0.3);
border-radius: var(--panel-radius);
padding: var(--space-1) var(--space-3);
cursor: pointer;
font-family: var(--font-body);
font-size: var(--text-xs);
transition: border-color var(--transition-ui), background var(--transition-ui);
backdrop-filter: blur(8px);
}
.hud-edge-btn:hover:not(:disabled) {
border-color: rgba(74, 240, 192, 0.7);
background: rgba(74, 240, 192, 0.08);
}
/* Disabled once loading starts (app.js sets disabled on click). */
.hud-edge-btn:disabled {
cursor: default;
}
/* Badge colour tracks the data-status attribute set by the JS status callback
   (idle | loading | ready | ollama | error). */
#edge-ai-badge[data-status="idle"] { color: var(--color-text-muted); }
#edge-ai-badge[data-status="loading"] { color: var(--color-warning); animation: pulse-text 1.2s ease-in-out infinite; }
#edge-ai-badge[data-status="ready"] { color: var(--color-primary); }
#edge-ai-badge[data-status="ollama"] { color: var(--color-secondary); }
#edge-ai-badge[data-status="error"] { color: var(--color-danger); }
/* npub badge — hidden until JS adds .visible after identity init. */
.hud-npub-badge {
background: rgba(10, 15, 40, 0.75);
border: 1px solid rgba(123, 92, 255, 0.3);
border-radius: var(--panel-radius);
padding: var(--space-1) var(--space-3);
font-family: var(--font-body);
font-size: var(--text-xs);
color: var(--color-secondary);
display: none;
backdrop-filter: blur(8px);
letter-spacing: 0.05em;
}
.hud-npub-badge.visible {
display: inline-block;
}
/* Thinking dot animation in chat */
.thinking-dot {
display: inline-block;
animation: blink-cursor 0.8s step-end infinite;
color: var(--color-primary);
}
@keyframes blink-cursor {
0%, 100% { opacity: 1; }
50% { opacity: 0; }
}
@keyframes pulse-text {
0%, 100% { opacity: 1; }
50% { opacity: 0.4; }
}
/* Mobile adjustments */
@media (max-width: 480px) {
.chat-panel {
@@ -635,4 +704,7 @@ canvas#nexus-canvas {
.hud-controls {
display: none;
}
.hud-sovereignty {
bottom: var(--space-4);
}
}