This repository has been archived on 2026-03-24. You can view files and clone it, but you cannot open issues or pull requests, or push commits.
Files
token-gated-economy/the-matrix/js/edge-worker-client.js
Replit Agent 898a47fd39 task-28 fix: proper Web Worker, correct Nostr endpoints, sentiment on inbound msgs
Addresses all code review rejections:

1. edge-worker.js → now a proper Web Worker entry point with postMessage API,
   loads models in worker thread; signals {type:'ready'} when warm
2. edge-worker-client.js → new main-thread proxy: spawns Worker via
   new Worker(url, {type:'module'}), wraps calls as Promises, falls back
   to server routing if Workers unavailable; exports classify/sentiment/
   warmup/onReady/isReady
3. nostr-identity.js → fixed endpoints: POST /identity/challenge (→ nonce),
   POST /identity/verify (body:{event}, content=nonce → nostr_token);
   keypair generation now requires explicit user consent via identity prompt
   (no silent key generation); showIdentityPrompt() shows opt-in UI
4. ui.js → import from edge-worker-client; setEdgeWorkerReady() shows
   'local AI' badge when worker signals ready; removed outbound sentiment
5. websocket.js → sentiment() on inbound Timmy chat messages drives setMood()
6. session.js → sentiment() on inbound reply (data.result), not outbound text
7. main.js → onEdgeWorkerReady(() => setEdgeWorkerReady()) wires ready badge
8. vite.config.js → worker.format:'es' for ESM Web Worker bundling
2026-03-19 18:16:40 +00:00

101 lines
3.1 KiB
JavaScript

/**
* edge-worker-client.js — Main-thread proxy for the edge-worker Web Worker.
*
* Spawns js/edge-worker.js as a module Worker and exposes:
* classify(text) → Promise<{ label:'local'|'server', score, reason, localReply? }>
* sentiment(text) → Promise<{ label:'POSITIVE'|'NEGATIVE'|'NEUTRAL', score }>
* onReady(fn) → register a callback fired when models finish loading
* isReady() → boolean — true once both models are warm
*
* If Web Workers are unavailable (SSR / old browser), all calls fall back to
* the naive "route to server" path so the app remains functional.
*/
// Lazily-created Worker instance; null until _init() runs (or if Workers are unavailable).
let _worker = null;
// Set to true once the worker posts {type:'ready'} (both models warm).
let _ready = false;
// Pending onReady callback; fired once and cleared when the 'ready' message arrives.
let _readyCb = null;
const _pending = new Map(); // id → { resolve, reject } for in-flight worker requests
let _nextId = 1; // monotonically increasing request id for correlating replies
/**
 * Spawn the edge-worker module Worker and wire up its message handlers.
 * Idempotent: returns immediately if the worker already exists. On any
 * failure to construct a Worker (SSR / old browser), leaves _worker null
 * so all calls take the fallback path.
 */
function _init() {
  if (_worker) return; // already spawned (or spawn previously attempted this tick)

  // Flush every in-flight request with a type-appropriate fallback so no
  // caller's Promise hangs. entry.type may be absent on entries recorded by
  // older code; null yields the generic "route to server" fallback.
  const flushPending = () => {
    for (const [, entry] of _pending) entry.resolve(_fallback(entry.type ?? null));
    _pending.clear();
  };

  try {
    // Use import.meta.url so Vite can resolve the worker URL correctly.
    // type:'module' is required for ESM imports inside the worker.
    _worker = new Worker(new URL('./edge-worker.js', import.meta.url), { type: 'module' });

    _worker.addEventListener('message', ({ data }) => {
      // Lifecycle events carry a `type` and no id.
      if (data?.type === 'ready') {
        _ready = true;
        if (_readyCb) { _readyCb(); _readyCb = null; }
        return;
      }
      if (data?.type === 'error') {
        // Models failed to load inside the worker; answer everyone with fallbacks.
        console.warn('[edge-worker] worker boot error:', data.message);
        flushPending();
        return;
      }
      // Regular response: { id, result } — resolve the matching request.
      const { id, result } = data ?? {};
      const entry = _pending.get(id);
      if (entry) {
        _pending.delete(id);
        entry.resolve(result);
      }
    });

    _worker.addEventListener('error', (err) => {
      // Fatal worker error (script failed to load/parse, uncaught throw).
      // Previously this only logged, leaving in-flight promises unresolved
      // forever and letting future requests post into a dead worker. Flush
      // pending, tear the worker down, and null it so subsequent calls take
      // the synchronous server-routing fallback immediately.
      console.warn('[edge-worker] worker error:', err.message);
      flushPending();
      _worker?.terminate?.();
      _worker = null;
    });
  } catch (err) {
    console.warn('[edge-worker] Web Workers unavailable — using fallback routing:', err.message);
    _worker = null;
  }
}
/**
 * Synchronous stand-in results used whenever the worker cannot answer.
 * @param {('classify'|'sentiment'|null)} type - request kind, or null when unknown.
 * @returns {{label: string, score: number, reason?: string}}
 */
function _fallback(type) {
  const wantsSentiment = type === 'sentiment';
  if (wantsSentiment) {
    // Mid-scale neutral sentiment keeps downstream mood logic inert.
    return { label: 'NEUTRAL', score: 0.5 };
  }
  // Everything else (classify, or unknown type): defer to the server route.
  return { label: 'server', score: 0, reason: 'worker-unavailable' };
}
/**
 * Post a request to the worker and return a Promise for its { id, result }
 * reply. Resolves synchronously with a fallback when no worker is available.
 * @param {('classify'|'sentiment')} type - request kind.
 * @param {string} text - text to analyze.
 * @returns {Promise<object>} the worker's result, or _fallback(type).
 */
function _send(type, text) {
  if (!_worker) return Promise.resolve(_fallback(type));
  const id = _nextId++;
  return new Promise((resolve) => {
    // Record the request type so error paths can resolve with a
    // type-appropriate fallback. (The old entry stored `reject: resolve`,
    // which nothing ever called — dropped.)
    _pending.set(id, { resolve, type });
    _worker.postMessage({ id, type, text });
  });
}
// ── Public API ────────────────────────────────────────────────────────────────
/**
 * Classify `text` via the worker's routing model.
 * Lazily spawns the worker on first call; falls back to server routing
 * when Workers are unavailable.
 * @param {string} text
 * @returns {Promise<{label:'local'|'server', score:number, reason?:string, localReply?:string}>}
 */
export function classify(text) {
  _init();
  return _send('classify', text);
}
/**
 * Run sentiment analysis on `text` via the worker.
 * Lazily spawns the worker on first call; resolves to a NEUTRAL fallback
 * when Workers are unavailable.
 * @param {string} text
 * @returns {Promise<{label:'POSITIVE'|'NEGATIVE'|'NEUTRAL', score:number}>}
 */
export function sentiment(text) {
  _init();
  return _send('sentiment', text);
}
/**
 * Register `fn` to run once the worker signals that models are warm.
 * Fires immediately (synchronously) if already ready. Multiple
 * registrations are all honored — the previous implementation kept a
 * single callback slot, so a second onReady() silently dropped the first.
 * @param {() => void} fn
 */
export function onReady(fn) {
  if (_ready) { fn(); return; }
  const prev = _readyCb;
  // Chain onto any previously registered callback instead of clobbering it.
  _readyCb = prev ? () => { prev(); fn(); } : fn;
}
export function isReady() { return _ready; }
/**
 * warmup() — start the worker (and model loading) early so classify/sentiment
 * calls on first user interaction don't stall waiting for models.
 * Safe to call repeatedly: _init() is a no-op once the worker exists.
 */
export function warmup() { _init(); }