task-28 fix3: complexity contract, consistent token headers, npub-only prompt
Some checks failed
CI / Typecheck & Lint (pull_request) Failing after 0s

1. edge-worker.js: replace binary label:local|server with complexity:trivial|moderate|complex
   - trivial  = greeting/small-talk ≥ 0.55 confidence → localReply, 0 sats
   - moderate = simple-question or uncertain score → show estimate, route to server
   - complex  = technical/creative/code OR score < 0.40 → show estimate, route to server
   - model-unavailable fallback → moderate (safe default, not 'server')

2. edge-worker-client.js: update fallback and JSDoc to new complexity shape
   - fallback returns { complexity:'moderate', ... } instead of { label:'server', ... }

3. ui.js: triage driven by cls.complexity, not cls.label
   - trivial + localReply → local answer, 0 sats badge, no server call
   - moderate/complex → _fetchEstimate() fired on classify outcome (not just debounce)
     then routed to server via WebSocket

4. session.js: X-Nostr-Token attached consistently on ALL outbound session calls
   - _startDepositPolling: GET /sessions/:id now includes X-Nostr-Token header
   - _startTopupPolling: GET /sessions/:id now includes X-Nostr-Token header
   - _tryRestore: GET /sessions/:id now includes X-Nostr-Token header
   - _createTopup: POST /sessions/:id/topup now includes X-Nostr-Token header

5. nostr-identity.js: _canSign flag tracks signing capability separately from pubkey
   - initNostrIdentity sets _canSign=true only when NIP-07 or privkey is available
   - npub-only discovery sets _pubkey but _canSign=false → prompt IS scheduled
   - Prompt shown when !_pubkey || !_canSign (not just !_pubkey)
   - Prompt click handlers set _canSign=true after connecting NIP-07 or generating key
   - refreshToken only called when _pubkey && _canSign (avoids silent failures)
This commit is contained in:
Replit Agent
2026-03-19 19:02:45 +00:00
parent 224208fa0f
commit 494393017c
5 changed files with 112 additions and 37 deletions

View File

@@ -2,13 +2,24 @@
* edge-worker-client.js — Main-thread proxy for the edge-worker Web Worker.
*
* Spawns js/edge-worker.js as a module Worker and exposes:
* classify(text)  → Promise<{
*                     complexity: 'trivial'|'moderate'|'complex',
*                     score: number,
*                     reason: string,
*                     localReply?: string   // only when complexity === 'trivial'
*                   }>
* sentiment(text) → Promise<{ label:'POSITIVE'|'NEGATIVE'|'NEUTRAL', score }>
* onReady(fn) → register a callback fired when models finish loading
* isReady() → boolean — true once both models are warm
* warmup() → start the worker early so first classify() is fast
*
* Complexity tiers (set by the worker):
*   trivial  — greeting/small-talk; answered locally, 0 sats, no server call
*   moderate — simple question; show cost preview, route to server
*   complex  — technical/creative/code; always priced, show cost preview
*
* If Web Workers are unavailable (SSR / old browser), all calls fall back
* gracefully: classify → { complexity:'moderate', ... } so the app still works.
*/
let _worker = null;
@@ -61,7 +72,8 @@ function _init() {
/**
 * _fallback — synchronous stand-in used when the Web Worker cannot run
 * (SSR / old browser). Keeps the app functional without local models.
 *
 * @param {string} type - request type: 'sentiment' or anything else (classify)
 * @returns {object} neutral sentiment for 'sentiment'; otherwise a 'moderate'
 *   classification so the UI still shows a cost estimate and routes the
 *   message to the server.
 */
function _fallback(type) {
  if (type === 'sentiment') return { label: 'NEUTRAL', score: 0.5 };
  // classify fallback: moderate keeps the UI functional (shows estimate, routes to server)
  return { complexity: 'moderate', score: 0, reason: 'worker-unavailable' };
}
function _send(type, text) {

View File

@@ -29,10 +29,17 @@ import { pipeline, env } from '@xenova/transformers';
env.useBrowserCache = true; // cache model weights via browser Cache API
env.allowLocalModels = false; // no filesystem — browser-only environment
// Classification labels → complexity tier mapping
//   trivial  — handled locally, no server call, no sats
//   moderate — real request but may be free-tier; show cost preview before send
//   complex  — substantive work; always priced; show cost preview before send
const TRIVIAL_LABELS = ['greeting', 'small-talk'];
const MODERATE_LABELS = ['simple-question'];
const COMPLEX_LABELS = ['technical-task', 'creative-work', 'complex-question', 'code-request'];
const ALL_LABELS = [...TRIVIAL_LABELS, ...MODERATE_LABELS, ...COMPLEX_LABELS];
const TRIVIAL_THRESHOLD = 0.55;  // minimum score to call trivial "trivial"
const MODERATE_THRESHOLD = 0.40; // below this → upgrade to complex (model is uncertain)
const LOCAL_REPLIES = [
"Greetings, traveller! Ask me something arcane and I shall conjure wisdom from the ether.",
@@ -64,31 +71,55 @@ async function _loadModels() {
// ── Handlers ──────────────────────────────────────────────────────────────────
/**
* _classify — returns the complexity tier of the input text.
*
* Returns:
* {
* complexity: 'trivial' | 'moderate' | 'complex',
* score: number, // top-label confidence
* reason: string, // winning label name or heuristic name
* localReply?: string, // only present when complexity === 'trivial'
* }
*
* Complexity tiers:
* trivial — greeting/small-talk with high confidence; answered locally, 0 sats
* moderate — simple question or uncertain classification; show cost preview, route to server
* complex — technical / creative / code work; always priced, show cost preview
*/
/**
 * _classify — returns the complexity tier of the input text.
 *
 * Returns:
 *   {
 *     complexity: 'trivial' | 'moderate' | 'complex',
 *     score: number,        // top-label confidence
 *     reason: string,       // winning label name or heuristic name
 *     localReply?: string,  // only present when complexity === 'trivial'
 *   }
 *
 * Complexity tiers:
 *   trivial  — greeting/small-talk with high confidence; answered locally, 0 sats
 *   moderate — simple question or uncertain classification; show cost preview, route to server
 *   complex  — technical / creative / code work; always priced, show cost preview
 *
 * Never throws: classifier errors degrade to a 'moderate' result carrying
 * the error string, so the caller can still route the message to the server.
 *
 * @param {string} text - raw user input
 * @returns {Promise<object>} complexity classification as described above
 */
async function _classify(text) {
  const trimmed = text.trim();
  // Fast path: single-word/short greetings are trivially local
  if (_isGreeting(trimmed)) {
    return { complexity: 'trivial', score: 0.99, reason: 'greeting-heuristic', localReply: _randomReply() };
  }
  // If model hasn't loaded yet, default to moderate (show estimate, let server decide)
  if (!_classifier) {
    return { complexity: 'moderate', score: 0, reason: 'model-unavailable' };
  }
  try {
    const result = await _classifier(trimmed, ALL_LABELS, { multi_label: false });
    const topLabel = result.labels[0];
    const topScore = result.scores[0];
    if (TRIVIAL_LABELS.includes(topLabel) && topScore >= TRIVIAL_THRESHOLD) {
      // High-confidence trivial: small-talk or greeting → answer locally
      return { complexity: 'trivial', score: topScore, reason: topLabel, localReply: _randomReply() };
    }
    if (COMPLEX_LABELS.includes(topLabel) || topScore < MODERATE_THRESHOLD) {
      // Explicitly complex label, or model is uncertain (score too low to trust) → complex
      return { complexity: 'complex', score: topScore, reason: topLabel };
    }
    // Middle ground: simple-question or trivial label with moderate confidence → moderate
    return { complexity: 'moderate', score: topScore, reason: topLabel };
  } catch (err) {
    return { complexity: 'moderate', score: 0, reason: 'classify-error', error: String(err) };
  }
}

View File

@@ -30,6 +30,7 @@ let _pubkey = null; // hex
let _token = null; // nostr_token string from server
let _tokenExp = 0; // unix ms when token was fetched
let _useNip07 = false; // true if window.nostr is available
let _canSign = false; // true if we have a signing capability (NIP-07 or local privkey)
let _identityPromptShown = false;
// ── Init ──────────────────────────────────────────────────────────────────────
@@ -40,6 +41,7 @@ export async function initNostrIdentity(apiBase = '/api') {
try {
_pubkey = await window.nostr.getPublicKey();
_useNip07 = true;
_canSign = true;
console.info('[nostr] Using NIP-07 extension, pubkey:', _pubkey.slice(0, 8) + '…');
} catch (err) {
console.warn('[nostr] NIP-07 getPublicKey failed, will use local keypair', err);
@@ -47,24 +49,33 @@ export async function initNostrIdentity(apiBase = '/api') {
}
}
// Try restoring an existing local keypair (consented previously)
// Try restoring / discovering an existing keypair
if (!_pubkey) {
_pubkey = _loadKeypair();
if (_pubkey) {
console.info('[nostr] Restored local keypair, pubkey:', _pubkey.slice(0, 8) + '…');
// Check if we actually have a privkey for signing (npub-only discovery gives pubkey but no privkey)
_canSign = !!_getPrivkeyBytes();
if (_canSign) {
console.info('[nostr] Restored local keypair (with signing), pubkey:', _pubkey.slice(0, 8) + '…');
} else {
console.info('[nostr] Discovered pubkey (view-only, no privkey), pubkey:', _pubkey.slice(0, 8) + '…');
}
}
}
// Try to restore cached token
_loadCachedToken();
// If we have a pubkey but no valid token, authenticate now
if (_pubkey && !_isTokenValid()) {
// If we have signing capability and no valid token, authenticate now
if (_pubkey && _canSign && !_isTokenValid()) {
await refreshToken(apiBase);
}
// If no identity at all, show the opt-in prompt so the user can choose
if (!_pubkey) {
// Show the opt-in prompt if:
// a) No identity at all — user can generate a keypair or connect NIP-07
// b) Have a pubkey but no signing capability (npub-only discovery) —
// user should be offered a way to add signing (generate new key or NIP-07)
if (!_pubkey || !_canSign) {
_scheduleIdentityPrompt(apiBase);
}
}
@@ -185,12 +196,14 @@ export function showIdentityPrompt(apiBase = '/api') {
try {
_pubkey = await window.nostr.getPublicKey();
_useNip07 = true;
_canSign = true;
} catch { return; }
} else {
// Generate + store keypair (user consented by clicking)
_pubkey = _generateAndSaveKeypair();
_pubkey = _generateAndSaveKeypair();
_canSign = true;
}
if (_pubkey) {
if (_pubkey && _canSign) {
await refreshToken(apiBase);
_updateIdentityHUD();
}

View File

@@ -278,7 +278,9 @@ function _startDepositPolling() {
async function poll() {
if (!_sessionId) return;
try {
const res = await fetch(`${API}/sessions/${_sessionId}`);
const nostrToken = await getOrRefreshToken('/api');
const pollHeaders = nostrToken ? { 'X-Nostr-Token': nostrToken } : {};
const res = await fetch(`${API}/sessions/${_sessionId}`, { headers: pollHeaders });
const data = await res.json();
if (data.state === 'active') {
@@ -322,12 +324,16 @@ async function _createTopup() {
_btn('session-topup-create-btn', true);
try {
const nostrToken = await getOrRefreshToken('/api');
const topupHeaders = {
'Content-Type': 'application/json',
'Authorization': `Bearer ${_macaroon}`,
};
if (nostrToken) topupHeaders['X-Nostr-Token'] = nostrToken;
const res = await fetch(`${API}/sessions/${_sessionId}/topup`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${_macaroon}`,
},
headers: topupHeaders,
body: JSON.stringify({ amount_sats: _topupSats }),
});
const data = await res.json();
@@ -388,7 +394,9 @@ function _startTopupPolling() {
async function poll() {
if (!_sessionId) return;
try {
const res = await fetch(`${API}/sessions/${_sessionId}`);
const nostrToken = await getOrRefreshToken('/api');
const pollHeaders = nostrToken ? { 'X-Nostr-Token': nostrToken } : {};
const res = await fetch(`${API}/sessions/${_sessionId}`, { headers: pollHeaders });
const data = await res.json();
if (data.balanceSats > prevBalance || data.state === 'active') {
@@ -421,7 +429,9 @@ async function _tryRestore() {
if (!sessionId || !macaroon) return;
// Validate the session is still live
const res = await fetch(`${API}/sessions/${sessionId}`);
const nostrToken = await getOrRefreshToken('/api');
const restoreHeaders = nostrToken ? { 'X-Nostr-Token': nostrToken } : {};
const res = await fetch(`${API}/sessions/${sessionId}`, { headers: restoreHeaders });
if (!res.ok) { localStorage.removeItem(LS_KEY); return; }
const data = await res.json();

View File

@@ -150,10 +150,11 @@ function initInputBar() {
}
// ── Edge triage — classify text in the Web Worker ─────────────────────────
// Worker returns { complexity: 'trivial'|'moderate'|'complex', score, reason, localReply? }
const cls = await classify(text);
if (cls.label === 'local' && cls.localReply) {
// Trivial/conversational — answer locally, no server round-trip
if (cls.complexity === 'trivial' && cls.localReply) {
// Greeting / small-talk → answer locally, 0 sats, no network call
appendSystemMessage(`you: ${text}`);
appendSystemMessage(`Timmy [local]: ${cls.localReply}`);
_showCostPreview('answered locally ⚡ 0 sats', '#44dd88');
@@ -161,7 +162,15 @@ function initInputBar() {
return;
}
// Substantive — route to server via WebSocket
// moderate or complex — fetch cost estimate (driven by complexity outcome),
// then route to server. The preview is already shown via debounce on input,
// but we refresh it now with the actual classification context.
if (cls.complexity === 'moderate' || cls.complexity === 'complex') {
// Fire estimate fetch; don't await — let it update the badge async while WS is in flight
_fetchEstimate(text);
}
// Route to server via WebSocket
sendVisitorMessage(text);
appendSystemMessage(`you: ${text}`);
}