[claude] Edge intelligence — browser model + silent Nostr signing (#15) #27
116
app.js
116
app.js
@@ -3,6 +3,8 @@ import { EffectComposer } from 'three/addons/postprocessing/EffectComposer.js';
|
||||
import { RenderPass } from 'three/addons/postprocessing/RenderPass.js';
|
||||
import { UnrealBloomPass } from 'three/addons/postprocessing/UnrealBloomPass.js';
|
||||
import { SMAAPass } from 'three/addons/postprocessing/SMAAPass.js';
|
||||
import { EdgeIntelligence } from './edge-intelligence.js';
|
||||
import { NostrIdentity } from './nostr-identity.js';
|
||||
|
||||
// ═══════════════════════════════════════════
|
||||
// NEXUS v1 — Timmy's Sovereign Home
|
||||
@@ -100,6 +102,35 @@ function init() {
|
||||
|
||||
updateLoad(100);
|
||||
|
||||
// ── Nostr identity (silent — no popup) ─────
|
||||
NostrIdentity.init().then(({ npub }) => {
|
||||
const el = document.getElementById('nostr-npub');
|
||||
if (el) el.textContent = npub.slice(0, 16) + '…';
|
||||
addChatMessage('system', `Nostr identity loaded: ${npub.slice(0, 16)}…`);
|
||||
}).catch(err => {
|
||||
console.warn('[Nostr] Identity init failed:', err);
|
||||
});
|
||||
|
||||
// ── Edge Intelligence (lazy — activate via HUD button) ──
|
||||
// Wire up the "Activate Edge AI" button shown in the HUD
|
||||
const edgeBtn = document.getElementById('edge-ai-activate');
|
||||
if (edgeBtn) {
|
||||
edgeBtn.addEventListener('click', () => {
|
||||
edgeBtn.disabled = true;
|
||||
EdgeIntelligence.init((state, text) => {
|
||||
const badge = document.getElementById('edge-ai-status');
|
||||
if (badge) {
|
||||
badge.textContent = text;
|
||||
badge.dataset.state = state;
|
||||
}
|
||||
if (state === 'ready') {
|
||||
if (edgeBtn) edgeBtn.style.display = 'none';
|
||||
addChatMessage('system', `Edge AI online — ${text}`);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Transition from loading to enter screen
|
||||
setTimeout(() => {
|
||||
document.getElementById('loading-screen').classList.add('fade-out');
|
||||
@@ -836,30 +867,81 @@ function setupControls() {
|
||||
document.getElementById('chat-send').addEventListener('click', sendChatMessage);
|
||||
}
|
||||
|
||||
/**
 * Handle a chat submission from the HUD input.
 *
 * Reply resolution order:
 *   1. Edge model in the browser (EdgeIntelligence — WebLLM / Transformers.js)
 *   2. Ollama backend via /api/chat (8-second timeout)
 *   3. Canned local fallback lines
 *
 * Note: the legacy simulated setTimeout reply was removed — keeping it
 * alongside the pipeline below would produce two replies per message.
 * Fire-and-forget: callers do not await this.
 */
async function sendChatMessage() {
  const input = document.getElementById('chat-input');
  const text = input.value.trim();
  if (!text) return;

  addChatMessage('user', text);
  input.value = '';
  input.blur();

  // Show a typing indicator while we wait for whichever backend answers.
  const thinkingId = _addThinkingIndicator();

  // 1. Try the edge model first (local, no server round-trip).
  const edgeReply = await EdgeIntelligence.query(text);
  _removeThinkingIndicator(thinkingId);

  if (edgeReply) {
    addChatMessage('timmy', edgeReply);
    return;
  }

  // 2. Try the Ollama backend.
  try {
    const ollamaReply = await _queryOllama(text);
    if (ollamaReply) {
      addChatMessage('timmy', ollamaReply);
      return;
    }
  } catch { /* Ollama not running — fall through */ }

  // 3. Local fallback responses.
  const fallbacks = [
    'Processing your request through the harness...',
    'I have noted this in my thought stream.',
    'Acknowledged. Routing to appropriate agent loop.',
    'The sovereign space recognizes your command.',
    'Running analysis. Results will appear on the main terminal.',
    'My crystal ball says... yes. Implementing.',
    'Understood, Alexander. Adjusting priorities.',
  ];
  addChatMessage('timmy', fallbacks[Math.floor(Math.random() * fallbacks.length)]);
}
|
||||
|
||||
/**
 * POST the user message to the Ollama-backed /api/chat endpoint.
 * Resolves with the reply string, or null on a non-OK status or an
 * unrecognized payload. Rejects (for the caller to catch) on network
 * failure or after the 8-second timeout.
 * @param {string} text
 * @returns {Promise<string|null>}
 */
async function _queryOllama(text) {
  const request = {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ message: text }),
    signal: AbortSignal.timeout(8000),
  };
  const res = await fetch('/api/chat', request);
  if (!res.ok) {
    return null;
  }
  const data = await res.json();
  if (data?.reply != null) return data.reply;
  return data?.response ?? null;
}
|
||||
|
||||
// Monotonic id source for thinking indicators.
let _thinkingCounter = 0;

/**
 * Append an animated "Timmy is thinking" bubble to the chat log and
 * scroll it into view.
 * @returns {number} Handle to pass to _removeThinkingIndicator.
 */
function _addThinkingIndicator() {
  _thinkingCounter += 1;
  const id = _thinkingCounter;

  const log = document.getElementById('chat-messages');
  const bubble = document.createElement('div');
  bubble.className = 'chat-msg chat-msg-timmy chat-msg-thinking';
  bubble.dataset.thinkingId = id;
  bubble.innerHTML =
    '<span class="chat-msg-prefix">[TIMMY]</span> <span class="thinking-dots">●●●</span>';

  log.appendChild(bubble);
  log.scrollTop = log.scrollHeight;
  return id;
}
|
||||
|
||||
/**
 * Remove a thinking bubble previously created by _addThinkingIndicator.
 * A stale or unknown id is a no-op.
 * @param {number} id
 */
function _removeThinkingIndicator(id) {
  const node = document.querySelector(`[data-thinking-id="${id}"]`);
  node?.remove();
}
|
||||
|
||||
function addChatMessage(type, text) {
|
||||
|
||||
145
edge-intelligence.js
Normal file
145
edge-intelligence.js
Normal file
@@ -0,0 +1,145 @@
|
||||
// ═══════════════════════════════════════════
|
||||
// EDGE INTELLIGENCE — Browser-side LLM
|
||||
// ═══════════════════════════════════════════
|
||||
// Inference priority:
|
||||
// 1. WebLLM (WebGPU) — SmolLM2-360M, near-zero latency once loaded
|
||||
// 2. Transformers.js (WASM/CPU) — LaMini-Flan-T5-77M, broader device support
|
||||
// 3. Signals caller to use Ollama backend
|
||||
// All modes are local-first; no server round-trip for simple queries.
|
||||
|
||||
// Model identifiers for each engine tier (WebGPU first, CPU fallback).
const WEBLLM_MODEL = 'SmolLM2-360M-Instruct-q0f16-MLC';
const TRANSFORMERS_MODEL = 'Xenova/LaMini-Flan-T5-77M';

// Persona prompt prepended to every WebLLM chat completion.
const SYSTEM_PROMPT =
  'You are Timmy, a sovereign lizard-wizard AI in The Nexus. ' +
  'Be concise, witty, and helpful. Keep replies under 2 sentences.';

// ─── State ─────────────────────────────────
let _engine = null; // WebLLM engine object, or Transformers.js pipeline function
let _mode = 'uninitialized'; // 'webllm' | 'transformers' | 'ollama' | 'uninitialized'
let _statusCb = null; // (state, text) => void supplied by init()
let _isLoading = false; // true while a load attempt is in flight
let _isReady = false; // true once a local engine finished loading
||||
|
||||
// ─── Public API ────────────────────────────
|
||||
|
||||
/**
 * Begin loading the best available edge model.
 * Idempotent: repeat calls while loading, or after a successful load,
 * are no-ops.
 * @param {(state: string, text: string) => void} statusCallback
 */
async function init(statusCallback) {
  if (_isLoading || _isReady) return;
  _isLoading = true;
  _statusCb = statusCallback ?? (() => {});

  _setStatus('loading', 'Probing edge capabilities…');

  // Short-circuits: Transformers.js is only attempted if WebLLM fails.
  const loaded = (await _tryWebLLM()) || (await _tryTransformers());
  _isLoading = false;
  if (loaded) {
    _isReady = true;
    return;
  }

  // Neither local engine is available — signal Ollama fallback.
  _mode = 'ollama';
  _setStatus('fallback', 'Edge AI: Ollama backend');
}
|
||||
|
||||
/**
 * Run inference on a user message.
 * Returns a reply string when handled locally, or null when the caller
 * should fall back to the Ollama backend (engine not ready, non-local
 * mode, or an inference error).
 * @param {string} userMessage
 * @returns {Promise<string|null>}
 */
async function query(userMessage) {
  if (!_isReady) return null;

  try {
    switch (_mode) {
      case 'webllm': {
        if (!_engine) break;
        const completion = await _engine.chat.completions.create({
          messages: [
            { role: 'system', content: SYSTEM_PROMPT },
            { role: 'user', content: userMessage },
          ],
          max_tokens: 120,
          temperature: 0.7,
          stream: false,
        });
        return completion.choices[0].message.content.trim();
      }
      case 'transformers': {
        if (!_engine) break;
        const output = await _engine(userMessage, {
          max_new_tokens: 80,
          do_sample: true,
          temperature: 0.7,
        });
        const raw = output[0]?.generated_text ?? '';
        // Some seq2seq models echo the prompt — strip it when present.
        const stripped = raw.replace(userMessage, '').trim();
        return stripped || raw.trim();
      }
    }
  } catch (err) {
    console.warn('[EdgeAI] Inference error:', err);
  }

  return null; // caller should fall back to Ollama
}
|
||||
|
||||
/** @returns {boolean} True once a model is loaded and ready. */
function isReady() {
  return _isReady;
}

/** @returns {string} Current inference mode string. */
function getMode() {
  return _mode;
}
|
||||
|
||||
// ─── Private helpers ───────────────────────
|
||||
|
||||
/**
 * Attempt to boot the WebLLM (WebGPU) engine from the esm.run CDN.
 * @returns {Promise<boolean>} True when the engine finished loading.
 */
async function _tryWebLLM() {
  // No WebGPU on this device/browser — don't even try.
  if (!navigator.gpu) return false;

  try {
    _setStatus('loading', 'Initializing WebLLM (WebGPU)…');
    const webllm = await import('https://esm.run/@mlc-ai/web-llm');
    _engine = await webllm.CreateMLCEngine(WEBLLM_MODEL, {
      initProgressCallback: (report) => {
        const pct = Math.round((report.progress ?? 0) * 100);
        _setStatus('loading', `Loading SmolLM2: ${pct}%`);
      },
    });
    _mode = 'webllm';
    _setStatus('ready', 'Edge AI: WebGPU ⚡');
    return true;
  } catch (err) {
    console.warn('[EdgeAI] WebLLM unavailable:', err.message ?? err);
    return false;
  }
}
|
||||
|
||||
/**
 * Attempt to boot a Transformers.js (WASM/CPU) text2text pipeline.
 * @returns {Promise<boolean>} True when the pipeline finished loading.
 */
async function _tryTransformers() {
  try {
    _setStatus('loading', 'Initializing edge model (CPU)…');
    const transformers = await import(
      'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.17.2'
    );
    const { pipeline, env } = transformers;
    // Fetch weights from the remote model hub; skip local-model probing.
    env.allowLocalModels = false;
    env.allowRemoteModels = true;

    _engine = await pipeline('text2text-generation', TRANSFORMERS_MODEL, {
      progress_callback: (info) => {
        if (info.status !== 'downloading') return;
        const pct = info.total
          ? Math.round((info.loaded / info.total) * 100)
          : 0;
        _setStatus('loading', `Downloading model: ${pct}%`);
      },
    });
    _mode = 'transformers';
    _setStatus('ready', 'Edge AI: CPU ◈');
    return true;
  } catch (err) {
    console.warn('[EdgeAI] Transformers.js unavailable:', err.message ?? err);
    return false;
  }
}
|
||||
|
||||
/**
 * Forward a status update to the registered HUD callback.
 * Guarded with ?. so a call before init() (while _statusCb is still
 * null) is a no-op instead of a TypeError.
 * @param {string} state One of 'loading' | 'ready' | 'fallback'
 * @param {string} text  Human-readable badge text
 */
function _setStatus(state, text) {
  _statusCb?.(state, text);
}
|
||||
|
||||
// Public module façade — one shared edge-AI engine per page.
export const EdgeIntelligence = { init, query, isReady, getMode };
|
||||
14
index.html
14
index.html
@@ -95,6 +95,20 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Edge AI status badge -->
|
||||
<div id="edge-ai-panel" class="edge-ai-panel">
|
||||
<span id="edge-ai-status" class="edge-ai-status" data-state="idle">Edge AI: offline</span>
|
||||
<button id="edge-ai-activate" class="edge-ai-btn" title="Load browser-side model for local inference">
|
||||
◈ Activate
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<!-- Nostr identity badge -->
|
||||
<div id="nostr-panel" class="nostr-panel" title="Local Nostr identity (no extension required)">
|
||||
<span class="nostr-icon">⚡</span>
|
||||
<span id="nostr-npub" class="nostr-npub">loading…</span>
|
||||
</div>
|
||||
|
||||
<!-- Minimap / Controls hint -->
|
||||
<div class="hud-controls">
|
||||
<span>WASD</span> move <span>Mouse</span> look <span>Enter</span> chat
|
||||
|
||||
163
nostr-identity.js
Normal file
163
nostr-identity.js
Normal file
@@ -0,0 +1,163 @@
|
||||
// ═══════════════════════════════════════════
|
||||
// NOSTR IDENTITY — Silent signing, no popup
|
||||
// ═══════════════════════════════════════════
|
||||
// Manages a local Nostr keypair stored in localStorage.
|
||||
// Events are signed client-side without triggering any extension popup.
|
||||
//
|
||||
// Priority:
|
||||
// 1. Existing key from localStorage (persisted identity)
|
||||
// 2. Freshly generated key (new sovereign identity)
|
||||
//
|
||||
// Users can optionally delegate signing to window.nostr (NIP-07) by
|
||||
// calling useExtension(true), but the default is always silent/local.
|
||||
|
||||
// localStorage key under which the serialized identity is persisted.
const STORAGE_KEY = 'nexus_nostr_identity_v1';
// Pinned CDN build of nostr-tools (v1 API surface).
const NOSTR_TOOLS = 'https://esm.sh/nostr-tools@1.17.0';

// ─── State ─────────────────────────────────
let _identity = null; // { privkey: hex, pubkey: hex, npub: string }
let _useExtension = false; // when true, delegate signing to window.nostr (NIP-07)
let _nt = null; // nostr-tools module reference
||||
|
||||
// ─── Public API ────────────────────────────
|
||||
|
||||
/**
 * Load or generate a local Nostr identity.
 * Returns { pubkey, npub } — the private key is never exposed externally.
 */
async function init() {
  _nt = await _loadTools();
  const stored = _loadStored();
  _identity = stored !== null ? stored : _generate();
  const { pubkey, npub } = _identity;
  return { pubkey, npub };
}
|
||||
|
||||
/**
 * Sign a Nostr event silently (no extension popup by default).
 * When extension delegation is enabled AND window.nostr exists, the
 * NIP-07 provider signs; otherwise signing stays local.
 * @param {number} kind NIP event kind
 * @param {string} content Event content
 * @param {Array} tags Optional tags array
 * @returns {Promise<object>} Fully signed Nostr event
 */
async function sign(kind, content, tags = []) {
  const ext = _useExtension ? window?.nostr : undefined;
  if (ext) {
    const unsigned = {
      kind,
      content,
      tags,
      created_at: Math.floor(Date.now() / 1000),
    };
    return ext.signEvent(unsigned);
  }
  return _signLocal(kind, content, tags);
}
|
||||
|
||||
/**
 * Return the public half of the identity (never the private key).
 * @returns {{ pubkey: string, npub: string } | null} null before init().
 */
function getIdentity() {
  return _identity
    ? { pubkey: _identity.pubkey, npub: _identity.npub }
    : null;
}
|
||||
|
||||
/**
 * Import an existing hex private key, overwriting the stored identity.
 * @param {string} hexPrivkey 64-character hex string
 * @returns {Promise<{ pubkey: string, npub: string }>}
 * @throws {Error} If the key is not a 64-character hex string.
 */
async function importKey(hexPrivkey) {
  // Validate up front — getPublicKey fails with a cryptic crypto error
  // on malformed input, which is useless to the caller.
  if (typeof hexPrivkey !== 'string' || !/^[0-9a-fA-F]{64}$/.test(hexPrivkey)) {
    throw new Error('[Nostr] importKey expects a 64-character hex private key');
  }
  if (!_nt) _nt = await _loadTools();
  const pubkey = _nt.getPublicKey(hexPrivkey);
  const npub = _nt.nip19.npubEncode(pubkey);
  _identity = { privkey: hexPrivkey, pubkey, npub };
  _persist(_identity);
  return { pubkey, npub };
}
|
||||
|
||||
/**
 * Rotate to a freshly generated keypair (persisted immediately).
 * @returns {Promise<{ pubkey: string, npub: string }>}
 */
async function rotateKey() {
  if (!_nt) _nt = await _loadTools();
  _identity = _generate();
  const { pubkey, npub } = _identity;
  return { pubkey, npub };
}
|
||||
|
||||
/**
 * Toggle delegation of signing to a NIP-07 browser extension.
 * When false (the default), all signing stays silent/local.
 * @param {boolean} yes
 */
function useExtension(yes) {
  _useExtension = Boolean(yes);
}
|
||||
|
||||
// ─── Private helpers ───────────────────────
|
||||
|
||||
/**
 * Dynamically import nostr-tools from the pinned CDN build.
 * nostr-tools v1 may expose its API as the default export — unwrap it
 * so callers always see the flat module surface.
 */
async function _loadTools() {
  const mod = await import(NOSTR_TOOLS);
  return mod.default ?? mod;
}
|
||||
|
||||
/**
 * Read a previously persisted identity from localStorage.
 * @returns {{ privkey: string, pubkey: string, npub: string } | null}
 *   null when nothing is stored, storage is unavailable, or the payload
 *   fails validation — the caller regenerates in that case.
 */
function _loadStored() {
  try {
    const raw = localStorage.getItem(STORAGE_KEY);
    if (!raw) return null;
    const stored = JSON.parse(raw);
    // Strict validation: the original length-only check accepted non-hex
    // strings and payloads missing npub (UI would render "undefined").
    const hex64 = /^[0-9a-f]{64}$/i;
    if (
      hex64.test(stored?.privkey ?? '') &&
      hex64.test(stored?.pubkey ?? '') &&
      typeof stored?.npub === 'string' &&
      stored.npub.startsWith('npub1')
    ) {
      return stored;
    }
  } catch { /* corrupted or storage unavailable — regenerate */ }
  return null;
}
|
||||
|
||||
/**
 * Create, persist, and announce a brand-new identity.
 * @returns {{ privkey: string, pubkey: string, npub: string }}
 */
function _generate() {
  const privkey = _nt.generatePrivateKey();
  const pubkey = _nt.getPublicKey(privkey);
  const identity = {
    privkey,
    pubkey,
    npub: _nt.nip19.npubEncode(pubkey),
  };
  _persist(identity);
  console.info(`[Nostr] New sovereign identity: ${identity.npub.slice(0, 16)}…`);
  return identity;
}
|
||||
|
||||
/**
 * Best-effort write of the identity to localStorage.
 * Failures (private browsing, quota, no storage) are intentionally
 * ignored — the identity then lives for this session only.
 */
function _persist(id) {
  try {
    localStorage.setItem(STORAGE_KEY, JSON.stringify(id));
  } catch {
    // storage unavailable — nothing to do
  }
}
|
||||
|
||||
/**
 * Build and sign a Nostr event with the locally stored private key.
 * Supports both the nostr-tools v1.1+ finishEvent API and the older
 * getEventHash/signEvent pair.
 * @throws {Error} If called before init() has loaded tools + identity.
 */
function _signLocal(kind, content, tags) {
  if (!_nt || !_identity) throw new Error('[Nostr] Identity not initialized');

  const { privkey, pubkey } = _identity;
  const evt = {
    kind,
    content,
    tags,
    created_at: Math.floor(Date.now() / 1000),
    pubkey,
  };

  // Preferred path: finishEvent computes id + sig in one step (v1.1+).
  if (typeof _nt.finishEvent === 'function') {
    return _nt.finishEvent(evt, privkey);
  }

  // Older v1 fallback: hash, then sign separately.
  evt.id = _nt.getEventHash(evt);
  evt.sig = _nt.signEvent(evt, privkey);
  return evt;
}
|
||||
|
||||
// Public API — the private key never leaves this module.
export const NostrIdentity = {
  init,
  sign,
  getIdentity,
  importKey,
  rotateKey,
  useExtension,
};
|
||||
93
style.css
93
style.css
@@ -348,6 +348,92 @@ canvas#nexus-canvas {
|
||||
color: var(--color-primary);
|
||||
}
|
||||
|
||||
/* === EDGE AI PANEL === */
/* Pill-shaped HUD badge (top-right) showing edge-model status. */
.edge-ai-panel {
  position: absolute;
  top: var(--space-3);
  right: var(--space-3);
  display: flex;
  align-items: center;
  gap: var(--space-2);
  background: var(--color-surface);
  backdrop-filter: blur(var(--panel-blur));
  border: 1px solid var(--color-border);
  border-radius: 20px;
  padding: var(--space-1) var(--space-3);
  pointer-events: auto; /* the button inside must stay clickable */
}

.edge-ai-status {
  font-size: var(--text-xs);
  color: var(--color-text-muted);
  letter-spacing: 0.05em;
  transition: color var(--transition-ui);
}
/* Color keyed off the data-state attribute set from app.js. */
.edge-ai-status[data-state="loading"] { color: var(--color-warning); }
.edge-ai-status[data-state="ready"] { color: var(--color-primary); }
.edge-ai-status[data-state="fallback"]{ color: var(--color-secondary); }

.edge-ai-btn {
  background: none;
  border: 1px solid var(--color-border-bright);
  border-radius: 12px;
  padding: 2px var(--space-3);
  font-family: var(--font-display);
  font-size: 10px;
  font-weight: 600;
  letter-spacing: 0.08em;
  color: var(--color-primary);
  cursor: pointer;
  transition: background var(--transition-ui), border-color var(--transition-ui);
}
.edge-ai-btn:hover { background: var(--color-primary-dim); }
/* Disabled while the model is loading (set from the click handler). */
.edge-ai-btn:disabled {
  opacity: 0.4;
  cursor: not-allowed;
}
||||
|
||||
/* === NOSTR IDENTITY PANEL === */
/* Read-only badge stacked below the edge-AI panel
   (36px offset — presumably the edge panel's height; verify if resized). */
.nostr-panel {
  position: absolute;
  top: calc(var(--space-3) + 36px + var(--space-2));
  right: var(--space-3);
  display: flex;
  align-items: center;
  gap: var(--space-2);
  background: var(--color-surface);
  backdrop-filter: blur(var(--panel-blur));
  border: 1px solid rgba(123, 92, 255, 0.25);
  border-radius: 20px;
  padding: var(--space-1) var(--space-3);
  pointer-events: none; /* display-only; never intercepts clicks */
}

.nostr-icon {
  font-size: 11px;
  color: var(--color-secondary);
}

.nostr-npub {
  font-size: var(--text-xs);
  color: var(--color-text-muted);
  letter-spacing: 0.04em;
  font-variant-numeric: tabular-nums; /* stable width for the npub digits */
}
|
||||
|
||||
/* === CHAT THINKING INDICATOR === */
/* Pulsing dots shown in the chat log while a reply is pending. */
.chat-msg-thinking .thinking-dots {
  display: inline-block;
  animation: thinking-pulse 1.2s ease-in-out infinite;
  letter-spacing: 0.15em;
  color: var(--color-primary);
  opacity: 0.7;
}
@keyframes thinking-pulse {
  0%, 100% { opacity: 0.3; }
  50% { opacity: 1; }
}
|
||||
|
||||
/* Mobile adjustments */
|
||||
@media (max-width: 480px) {
|
||||
.chat-panel {
|
||||
@@ -358,4 +444,11 @@ canvas#nexus-canvas {
|
||||
.hud-controls {
|
||||
display: none;
|
||||
}
|
||||
.edge-ai-panel,
|
||||
.nostr-panel {
|
||||
right: var(--space-2);
|
||||
}
|
||||
.edge-ai-btn {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user