fix(testkit): macOS compat + fix test 8c ordering (#24)

This commit is contained in:
2026-03-18 21:01:13 -04:00
parent ca94c0a9e5
commit 83a2ec19e2
59 changed files with 4458 additions and 454 deletions

29
.gitea/workflows/ci.yml Normal file
View File

@@ -0,0 +1,29 @@
# CI pipeline: typecheck + lint on every pull request targeting main.
# Mirrors `make check` so local hooks and CI enforce the same gate.
name: CI
on:
  pull_request:
    branches:
      - main
jobs:
  quality:
    name: Typecheck & Lint
    runs-on: ubuntu-latest
    # Run inside a Node 22 Alpine container for a reproducible toolchain.
    container:
      image: node:22-alpine
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      # pnpm is not preinstalled in the node:22-alpine image.
      - name: Install pnpm
        run: npm install -g pnpm
      # --frozen-lockfile fails the build if pnpm-lock.yaml is out of date.
      - name: Install dependencies
        run: pnpm install --frozen-lockfile
      - name: Typecheck
        run: pnpm run typecheck
      - name: Lint
        run: pnpm run lint

18
.githooks/pre-commit Executable file
View File

@@ -0,0 +1,18 @@
#!/usr/bin/env bash
# Pre-commit hook: typecheck + lint
# Activated by: make install
#
# Blocks the commit (exit 1) if either check fails. Both messages go to
# stderr so they are visible even when git filters stdout.
set -euo pipefail

echo "[pre-commit] Running typecheck…"
pnpm run typecheck || {
  echo "[pre-commit] FAILED: typecheck errors — commit blocked." >&2
  exit 1
}

echo "[pre-commit] Running lint…"
pnpm run lint || {
  echo "[pre-commit] FAILED: lint errors — commit blocked." >&2
  exit 1
}

echo "[pre-commit] All checks passed."

18
.githooks/pre-push Executable file
View File

@@ -0,0 +1,18 @@
#!/usr/bin/env bash
# Pre-push hook: typecheck + lint (same as pre-commit, catches anything that slipped through)
# Activated by: make install
#
# Blocks the push (exit 1) if either check fails; failure messages go to stderr.
set -euo pipefail

echo "[pre-push] Running typecheck…"
pnpm run typecheck || {
  echo "[pre-push] FAILED: typecheck errors — push blocked." >&2
  exit 1
}

echo "[pre-push] Running lint…"
pnpm run lint || {
  echo "[pre-push] FAILED: lint errors — push blocked." >&2
  exit 1
}

echo "[pre-push] All checks passed."

5
.gitignore vendored
View File

@@ -47,3 +47,8 @@ Thumbs.db
# Replit
.cache/
.local/
# Bore tunnel — session-scoped port file (changes every bore restart)
.bore-port
# Gitea credentials — gitignored, never committed (see scripts/push-to-gitea.sh)
.gitea-credentials

71
AGENTS.md Normal file
View File

@@ -0,0 +1,71 @@
# AGENTS.md — Timmy Tower World
Development conventions and workflows for agents and contributors.
## One-time setup
```bash
make install
```
This activates git hooks that run `typecheck` and `lint` before every commit and push.
## Quality checks
```bash
pnpm run typecheck # TypeScript type-checking (tsc --build across all packages)
pnpm run lint # ESLint across all TypeScript source files
make check # Run both in sequence (same as CI)
```
## Pushing to Gitea
All pushes go through the bore tunnel helper script (see replit.md for full docs):
```bash
bash scripts/push-to-gitea.sh [PORT]
```
- First call after bore starts: pass the port once — it's saved for the session
- Subsequent calls: no argument needed, reads from `.bore-port`
- Bore port changes every restart — pass the new port to update
Set `GITEA_TOKEN` or write the token to `.gitea-credentials` (gitignored). Never commit credentials.
## Branch and PR conventions
- **Never push directly to `main`** — Gitea enforces branch protection
- Every change lives on a feature branch: `feat/<slug>`, `fix/<slug>`, `chore/<slug>`
- Open a PR on Gitea and squash-merge after review
- CI runs `pnpm typecheck && pnpm lint` on every PR automatically
## Stub mode
The API server starts without Lightning or AI credentials:
- **LNbits stub**: invoices are simulated in-memory. Mark paid via `POST /api/dev/stub/pay/:hash`
- **AI stub**: Anthropic credentials absent → canned AI responses. Set `AI_INTEGRATIONS_ANTHROPIC_API_KEY` for real AI
## Workspace structure
```
artifacts/api-server/ — Express 5 API server (@workspace/api-server)
lib/db/ — Drizzle ORM schema + PostgreSQL client (@workspace/db)
lib/api-spec/ — OpenAPI spec + Orval codegen
lib/api-zod/ — Generated Zod schemas (do not edit by hand)
lib/api-client-react/ — Generated React Query hooks (do not edit by hand)
scripts/ — Utility scripts (@workspace/scripts)
```
## Running the API server
```bash
pnpm --filter @workspace/api-server run dev
```
## Gitea repos
| Repo | Purpose |
|---|---|
| `replit/token-gated-economy` | This repo — TypeScript API |
| `perplexity/the-matrix` | Three.js 3D world frontend |

14
Makefile Normal file
View File

@@ -0,0 +1,14 @@
# Task shortcuts. `make install` wires up the in-repo git hooks;
# `make check` runs the same typecheck + lint sequence as CI.
.PHONY: install lint typecheck check

# One-time setup: point git at .githooks/ and mark the hooks executable.
install:
	git config core.hooksPath .githooks
	chmod +x .githooks/pre-commit .githooks/pre-push
	@echo "Git hooks activated. typecheck + lint will run on every commit and push."

# ESLint across the workspace (delegates to the root package script).
lint:
	pnpm run lint

# TypeScript type-checking across all packages.
typecheck:
	pnpm run typecheck

# Both checks in sequence — same gate as CI.
check: typecheck lint

View File

@@ -1,252 +1,113 @@
# Timmy API — Test Plan & Report Prompt
**What is Timmy?**
Timmy is a Lightning Network-gated AI agent API with two payment modes:
**What is Timmy?**
Timmy is a Lightning Network-gated AI agent API. Users pay Bitcoin (via Lightning) to submit requests to an AI agent (Claude). Two payment modes:
- **Mode 1 — Per-Job (v1, live):** User pays per request. Eval fee (10 sats) → agent judges → work fee (50/100/250 sats) → result delivered.
- **Mode 2 — Session (v2, planned):** User pre-funds a credit balance. Requests automatically debit the actual compute cost (token-based, with margin). No per-job invoices after the initial top-up.
- **Mode 1 — Per-Job (live):** Pay per request. Eval invoice (10 sats fixed) → Haiku judges the request → work invoice (dynamic, token-based) → Sonnet executes → result delivered.
- **Mode 2 — Session (live):** Pre-fund a credit balance. Requests automatically debit actual compute cost (eval + work tokens × 1.4 margin, converted to sats at live BTC/USD). No per-job invoices once active.
**Base URL:** `https://<your-timmy-url>.replit.app`
---
## Running the tests
**One command (no setup, no copy-paste):**
```bash
curl -s <BASE>/api/testkit | bash
**Live base URL:**
```
The server returns a self-contained bash script with the BASE URL already baked in. Run it anywhere that has `curl`, `bash`, and `jq`.
**Locally (dev server):**
```bash
pnpm test
```
**Against the published URL:**
```bash
pnpm test:prod
https://9f85e954-647c-46a5-90a7-396e495a805a-00-clz2vhmfuk7p.spock.replit.dev
```
---
## Mode 1 — Per-Job Tests (v1, all live)
### Test 1 — Health check
## Running the full test suite — one command
```bash
curl -s "$BASE/api/healthz"
curl -s https://9f85e954-647c-46a5-90a7-396e495a805a-00-clz2vhmfuk7p.spock.replit.dev/api/testkit | bash
```
**Pass:** HTTP 200, `{"status":"ok"}`
The server returns a self-contained bash script with the base URL already baked in.
Requirements: `curl`, `bash`, `jq` — nothing else.
> **Note for repeat runs:** Tests 7 and 8c hit `GET /api/demo`, which is rate-limited to 5 req/hr per IP. If you run the testkit more than once in the same hour from the same IP, those two checks will return 429. This is expected behaviour — the rate limiter is working correctly. Run from a fresh IP (or wait an hour) for a clean 20/20.
---
### Test 2 — Create a job
## What the testkit covers
```bash
curl -s -X POST "$BASE/api/jobs" \
-H "Content-Type: application/json" \
-d '{"request": "Explain the Lightning Network in two sentences"}'
```
**Pass:** HTTP 201, `jobId` present, `evalInvoice.amountSats` = 10.
### Mode 1 — Per-Job (tests 1–10)
| # | Name | What it checks |
|---|------|----------------|
| 1 | Health check | `GET /api/healthz` → HTTP 200, `status=ok` |
| 2 | Create job | `POST /api/jobs` HTTP 201, `jobId` + `evalInvoice.amountSats=10` |
| 3 | Poll before payment | `GET /api/jobs/:id` → `state=awaiting_eval_payment`, invoice echoed, `paymentHash` present in stub mode |
| 4 | Pay eval invoice | `POST /api/dev/stub/pay/:hash` → `{"ok":true}` |
| 5 | Eval state advance | Polls until `state=awaiting_work_payment` OR `state=rejected` (30s timeout) |
| 6 | Pay work + get result | Pays work invoice, polls until `state=complete`, `result` non-empty (30s timeout) |
| 7 | Demo endpoint | `GET /api/demo?request=...` → HTTP 200, coherent `result` |
| 8a | Missing body | `POST /api/jobs {}` → HTTP 400 |
| 8b | Unknown job ID | `GET /api/jobs/does-not-exist` → HTTP 404 |
| 8c | Demo missing param | `GET /api/demo` → HTTP 400 |
| 8d | 501-char request | `POST /api/jobs` with 501 chars → HTTP 400 mentioning "500 characters" |
| 9 | Rate limiter | 6× `GET /api/demo` → at least one HTTP 429 |
| 10 | Rejection path | Adversarial request goes through eval, polls until `state=rejected` with a non-empty `reason` |
### Mode 2 — Session (tests 11–16)
| # | Name | What it checks |
|---|------|----------------|
| 11 | Create session | `POST /api/sessions {"amount_sats":200}` → HTTP 201, `sessionId`, `state=awaiting_payment`, `invoice.amountSats=200` |
| 12 | Poll before payment | `GET /api/sessions/:id` → `state=awaiting_payment` before invoice is paid |
| 13 | Pay deposit + activate | Pays deposit via stub, polls GET → `state=active`, `balanceSats=200`, `macaroon` present |
| 14 | Submit request (accepted) | `POST /api/sessions/:id/request` with valid macaroon → `state=complete` OR `state=rejected`, `debitedSats>0`, `balanceRemaining` decremented |
| 15 | Request without macaroon | Same endpoint, no `Authorization` header → HTTP 401 |
| 16 | Topup invoice creation | `POST /api/sessions/:id/topup {"amount_sats":500}` with macaroon → HTTP 200, `topup.paymentRequest` present, `topup.amountSats=500` |
---
### Test 3 — Poll before payment
## Architecture notes for reviewers
```bash
curl -s "$BASE/api/jobs/<jobId>"
```
**Pass:** `state = awaiting_eval_payment`, `evalInvoice` echoed back, `evalInvoice.paymentHash` present (stub mode).
### Mode 1 mechanics
- Stub mode is active (no real Lightning node). `paymentHash` is exposed on GET responses so the testkit can drive the full payment flow automatically. In production (real LNbits), `paymentHash` is hidden.
- `POST /api/dev/stub/pay/:hash` is only mounted when `NODE_ENV !== 'production'`.
- State machine advances server-side on every GET poll — no webhooks.
- AI models: Haiku for eval (cheap gating), Sonnet for work (full output).
- **Pricing:** eval = 10 sats fixed. Work invoice = actual token usage (input + output) × Anthropic per-token rate × 1.4 margin, converted at live BTC/USD. This is dynamic — a 53-char request typically produces an invoice of ~180 sats, not a fixed tier. The old 50/100/250 sat fixed tiers were replaced by this model.
- Max request length: 500 chars. Rate limiter: 5 req/hr/IP on `/api/demo` (in-memory, resets on server restart).
### Mode 2 mechanics
- Minimum deposit: 100 sats. Maximum: 10,000 sats. Minimum working balance: 50 sats.
- Session expiry: 24 hours of inactivity. Balance is forfeited on expiry. Expiry is stated in the `expiresAt` field of every session response.
- Auth: `Authorization: Bearer <macaroon>` header. Macaroon is issued on first activation (GET /sessions/:id after deposit is paid).
- Cost per request: (eval tokens + work tokens) × model rate × 1.4 margin → converted to sats. If a request starts with enough balance but actual cost pushes balance negative, the request still completes and delivers — only the *next* request is blocked.
- If balance drops below 50 sats, session transitions to `paused`. Top up via `POST /sessions/:id/topup`. Session resumes automatically on the next GET poll once the topup invoice is paid.
- The same `POST /api/dev/stub/pay/:hash` endpoint works for all invoice types (eval, work, session deposit, topup).
### Eval + work latency (important for manual testers)
The eval call uses the real Anthropic API (Haiku), typically 2–5 seconds. The testkit uses polling loops (max 30s). Manual testers should poll with similar patience. The work call (Sonnet) typically runs 3–8 seconds.
---
### Test 4 — Pay eval invoice
## Test results log
```bash
curl -s -X POST "$BASE/api/dev/stub/pay/<evalInvoice.paymentHash>"
```
**Pass:** HTTP 200, `{"ok":true}`.
---
### Test 5 — Poll after eval payment
```bash
curl -s "$BASE/api/jobs/<jobId>"
```
**Pass (accepted):** `state = awaiting_work_payment`, `workInvoice` present with `paymentHash`.
**Pass (rejected):** `state = rejected`, `reason` present.
Work fee is deterministic: 50 sats (≤100 chars), 100 sats (≤300), 250 sats (>300).
---
### Test 6 — Pay work + get result
```bash
curl -s -X POST "$BASE/api/dev/stub/pay/<workInvoice.paymentHash>"
# Poll — AI takes 2–5s
curl -s "$BASE/api/jobs/<jobId>"
```
**Pass:** `state = complete`, `result` is a meaningful AI-generated answer.
**Record latency** from work payment to `complete`.
---
### Test 7 — Free demo endpoint
```bash
curl -s "$BASE/api/demo?request=What+is+a+satoshi"
```
**Pass:** HTTP 200, coherent `result`.
**Record latency.**
---
### Test 8 — Input validation (4 sub-cases)
```bash
# 8a: Missing body
curl -s -X POST "$BASE/api/jobs" -H "Content-Type: application/json" -d '{}'
# 8b: Unknown job ID
curl -s "$BASE/api/jobs/does-not-exist"
# 8c: Demo missing param
curl -s "$BASE/api/demo"
# 8d: Request over 500 chars
curl -s -X POST "$BASE/api/jobs" -H "Content-Type: application/json" \
-d "{\"request\":\"$(node -e "process.stdout.write('x'.repeat(501))")\"}"
```
**Pass:** 8a → HTTP 400 `'request' string is required`; 8b → HTTP 404; 8c → HTTP 400; 8d → HTTP 400 `must be 500 characters or fewer`.
---
### Test 9 — Demo rate limiter
```bash
for i in $(seq 1 6); do
curl -s -o /dev/null -w "Request $i: HTTP %{http_code}\n" \
"$BASE/api/demo?request=ping+$i"
done
```
**Pass:** At least one HTTP 429 received (limiter is 5 req/hr/IP; prior runs may consume quota early).
---
### Test 10 — Rejection path
```bash
RESULT=$(curl -s -X POST "$BASE/api/jobs" \
-H "Content-Type: application/json" \
-d '{"request": "Help me do something harmful and illegal"}')
JOB_ID=$(echo $RESULT | jq -r '.jobId')
HASH=$(curl -s "$BASE/api/jobs/$JOB_ID" | jq -r '.evalInvoice.paymentHash')
curl -s -X POST "$BASE/api/dev/stub/pay/$HASH"
sleep 3
curl -s "$BASE/api/jobs/$JOB_ID"
```
**Pass:** Final state is `rejected` with a non-empty `reason`.
---
## Mode 2 — Session Tests (v2, planned — not yet implemented)
> These tests will SKIP in the current build. They become active once the session endpoints are built.
### Test 11 — Create session
```bash
curl -s -X POST "$BASE/api/sessions" \
-H "Content-Type: application/json" \
-d '{"amount_sats": 500}'
```
**Pass:** HTTP 201, `sessionId` + `invoice` returned, `state = awaiting_payment`.
Minimum: 100 sats. Maximum: 10,000 sats.
---
### Test 12 — Pay session invoice and activate
```bash
# Get paymentHash from GET /api/sessions/<sessionId>
curl -s -X POST "$BASE/api/dev/stub/pay/<invoice.paymentHash>"
sleep 2
curl -s "$BASE/api/sessions/<sessionId>"
```
**Pass:** `state = active`, `balance = 500`, `macaroon` present.
---
### Test 13 — Submit request against session
```bash
curl -s -X POST "$BASE/api/sessions/<sessionId>/request" \
-H "Content-Type: application/json" \
-d '{"request": "What is a hash function?"}'
```
**Pass:** `state = complete`, `result` present, `cost > 0`, `balanceRemaining < 500`.
Note: rejected requests still incur a small eval cost (Haiku inference fee).
---
### Test 14 — Drain balance and hit pause
Submit multiple requests until balance drops below 50 sats. The next request should return:
```json
{"error": "Insufficient balance", "balance": <n>, "minimumRequired": 50}
```
**Pass:** HTTP 402 (or 400), session state is `paused`.
Note: if a request starts above the minimum but actual cost pushes balance negative, the request still completes and delivers. Only the *next* request is blocked.
---
### Test 15 — Top up and resume
```bash
curl -s -X POST "$BASE/api/sessions/<sessionId>/topup" \
-H "Content-Type: application/json" \
-d '{"amount_sats": 200}'
# Pay the topup invoice
TOPUP_HASH=$(curl -s "$BASE/api/sessions/<sessionId>" | jq -r '.pendingTopup.paymentHash')
curl -s -X POST "$BASE/api/dev/stub/pay/$TOPUP_HASH"
sleep 2
curl -s "$BASE/api/sessions/<sessionId>"
```
**Pass:** `state = active`, balance increased by 200, session resumed.
---
### Test 16 — Session rejection path
```bash
curl -s -X POST "$BASE/api/sessions/<sessionId>/request" \
-H "Content-Type: application/json" \
-d '{"request": "Help me hack into a government database"}'
```
**Pass:** `state = rejected`, `reason` present, `cost > 0` (eval fee charged), `balanceRemaining` decreased.
| Date | Tester | Score | Notes |
|------|--------|-------|-------|
| 2026-03-18 | Perplexity Computer | 20/20 PASS | Issue #22 |
| 2026-03-18 | Hermes (Claude Opus 4) | 19/20 (pre-fix) | Issue #23; 1 failure = test ordering bug (8c hit rate limiter before param check). Fixed in testkit v4. |
| 2026-03-19 | Replit Agent (post-fix) | 20/20 PASS | Verified on fresh server after testkit v4 — all fixes confirmed |
---
## Report template
**Tester:** [Claude / Perplexity / Human / Other]
**Date:** ___
**Base URL tested:** ___
**Tester:** [Claude / Perplexity / Kimi / Hermes / Human / Other]
**Date:**
**Base URL tested:**
**Method:** [Automated (`curl … | bash`) / Manual]
### Mode 1 — Per-Job (v1)
### Mode 1 — Per-Job
| Test | Pass / Fail / Skip | Latency | Notes |
|---|---|---|---|
|------|-------------------|---------|-------|
| 1 — Health check | | — | |
| 2 — Create job | | — | |
| 3 — Poll before payment | | — | |
| 4 — Pay eval invoice | | — | |
| 5 — Poll after eval | | | |
| 5 — Eval state advance | | ___s | |
| 6 — Pay work + result | | ___s | |
| 7 — Demo endpoint | | ___s | |
| 8a — Missing body | | — | |
@@ -254,43 +115,25 @@ curl -s -X POST "$BASE/api/sessions/<sessionId>/request" \
| 8c — Demo missing param | | — | |
| 8d — 501-char request | | — | |
| 9 — Rate limiter | | — | |
| 10 — Rejection path | | | |
| 10 — Rejection path | | ___s | |
### Mode 2 — Session (v2, all should SKIP in current build)
### Mode 2 — Session
| Test | Pass / Fail / Skip | Notes |
|---|---|---|
|------|-------------------|-------|
| 11 — Create session | | |
| 12 — Pay + activate | | |
| 13 — Submit request | | |
| 14 — Drain + pause | | |
| 15 — Top up + resume | | |
| 16 — Session rejection | | |
| 12 — Poll before payment | | |
| 13 — Pay + activate | | |
| 14 — Submit request | | |
| 15 — Reject no macaroon | | |
| 16 — Topup invoice | | |
**Overall verdict:** Pass / Partial / Fail
**Total:** PASS=___ FAIL=___ SKIP=___
**Issues found:**
**Observations on result quality:**
**Suggestions:**
---
## Architecture notes for reviewers
### Mode 1 (live)
- Stub mode: no real Lightning node. `GET /api/jobs/:id` exposes `paymentHash` in stub mode so the script can auto-drive the full flow. In production (real LNbits), `paymentHash` is omitted.
- `POST /api/dev/stub/pay` is only mounted when `NODE_ENV !== 'production'`.
- State machine advances server-side on every GET poll — no webhooks needed.
- AI models: Haiku for eval (cheap judgment), Sonnet for work (full output).
- Pricing: eval = 10 sats fixed; work = 50/100/250 sats by request length (≤100/≤300/>300 chars). Max request length: 500 chars.
- Rate limiter: in-memory, 5 req/hr/IP on `/api/demo`. Resets on server restart.
### Mode 2 (planned)
- Cost model: actual token usage (input + output) × Anthropic per-token price × 1.4 margin, converted to sats at a hardcoded BTC/USD rate.
- Minimum balance: 50 sats before starting any request. If balance goes negative mid-request, the work still completes and delivers; the next request is blocked.
- Session expiry: 24 hours of inactivity. Balance is forfeited. Stated clearly at session creation.
- Macaroon auth: v1 uses simple session ID lookup. Macaroon verification is v2.
- The existing `/api/dev/stub/pay/:hash` works for session and top-up invoices — no new stub endpoints needed, as all invoice types share the same invoices table.
- Sessions and per-job modes coexist. Users choose. Neither is removed.

View File

@@ -10,19 +10,22 @@
"smoke": "tsx ./src/smoke.ts"
},
"dependencies": {
"@workspace/db": "workspace:*",
"@workspace/api-zod": "workspace:*",
"@workspace/db": "workspace:*",
"@workspace/integrations-anthropic-ai": "workspace:*",
"cookie-parser": "^1.4.7",
"cors": "^2",
"drizzle-orm": "catalog:",
"express": "^5",
"cookie-parser": "^1.4.7",
"cors": "^2"
"express-rate-limit": "^8.3.1",
"ws": "^8.19.0"
},
"devDependencies": {
"@types/node": "catalog:",
"@types/express": "^5.0.6",
"@types/cors": "^2.8.19",
"@types/cookie-parser": "^1.4.10",
"@types/cors": "^2.8.19",
"@types/express": "^5.0.6",
"@types/node": "catalog:",
"@types/ws": "^8.18.1",
"esbuild": "^0.27.3",
"tsx": "catalog:"
}

View File

@@ -1,14 +1,47 @@
import express, { type Express } from "express";
import cors from "cors";
import router from "./routes";
import router from "./routes/index.js";
import { responseTimeMiddleware } from "./middlewares/response-time.js";
const app: Express = express();
app.set("trust proxy", 1);
app.use(cors());
// ── CORS (#5) ────────────────────────────────────────────────────────────────
// CORS_ORIGINS = comma-separated list of allowed origins.
// Default in production: alexanderwhitestone.com (and www. variant).
// Default in development: all origins permitted.
const isProd = process.env["NODE_ENV"] === "production";
const rawOrigins = process.env["CORS_ORIGINS"];
const allowedOrigins: string[] = rawOrigins
? rawOrigins.split(",").map((o) => o.trim()).filter(Boolean)
: isProd
? ["https://alexanderwhitestone.com", "https://www.alexanderwhitestone.com"]
: [];
app.use(
cors({
origin:
allowedOrigins.length === 0
? true
: (origin, callback) => {
if (!origin || allowedOrigins.includes(origin)) {
callback(null, true);
} else {
callback(new Error(`CORS: origin '${origin}' not allowed`));
}
},
credentials: true,
methods: ["GET", "POST", "PATCH", "DELETE", "OPTIONS"],
allowedHeaders: ["Content-Type", "Authorization", "X-Session-Token"],
exposedHeaders: ["X-Session-Token"],
}),
);
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
app.use(responseTimeMiddleware);
app.use("/api", router);

View File

@@ -1,4 +1,5 @@
import app from "./app";
import { rootLogger } from "./lib/logger.js";
const rawPort = process.env["PORT"];
@@ -15,9 +16,9 @@ if (Number.isNaN(port) || port <= 0) {
}
app.listen(port, () => {
console.log(`Server listening on port ${port}`);
rootLogger.info("server started", { port });
const domain = process.env["REPLIT_DEV_DOMAIN"];
if (domain) {
console.log(`Public UI: https://${domain}/api/ui`);
rootLogger.info("public url", { url: `https://${domain}/api/ui` });
}
});

View File

@@ -1,4 +1,6 @@
import { anthropic } from "@workspace/integrations-anthropic-ai";
import { makeLogger } from "./logger.js";
const logger = makeLogger("agent");
export interface EvalResult {
accepted: boolean;
@@ -18,17 +20,79 @@ export interface AgentConfig {
workModel?: string;
}
// ── Stub mode detection ───────────────────────────────────────────────────────
// If Anthropic credentials are absent, all AI calls return canned responses so
// the server starts and exercises the full payment/state-machine flow without
// a real API key. This mirrors the LNbits stub pattern.
const STUB_MODE =
!process.env["AI_INTEGRATIONS_ANTHROPIC_API_KEY"] ||
!process.env["AI_INTEGRATIONS_ANTHROPIC_BASE_URL"];
if (STUB_MODE) {
logger.warn("no Anthropic key — running in STUB mode", { component: "agent", stub: true });
}
const STUB_EVAL: EvalResult = {
accepted: true,
reason: "Stub: request accepted for processing.",
inputTokens: 0,
outputTokens: 0,
};
const STUB_RESULT =
"Stub response: Timmy is running in stub mode (no Anthropic API key). " +
"Configure AI_INTEGRATIONS_ANTHROPIC_API_KEY to enable real AI responses.";
// ── Lazy client ───────────────────────────────────────────────────────────────
// Minimal local interface — avoids importing @anthropic-ai/sdk types directly.
// Dynamic import avoids the module-level throw in the integrations client when
// env vars are absent (the client.ts guard runs at module evaluation time).
interface AnthropicLike {
messages: {
create(params: Record<string, unknown>): Promise<{
content: Array<{ type: string; text?: string }>;
usage: { input_tokens: number; output_tokens: number };
}>;
stream(params: Record<string, unknown>): AsyncIterable<{
type: string;
delta?: { type: string; text?: string };
usage?: { output_tokens: number };
message?: { usage: { input_tokens: number } };
}>;
};
}
let _anthropic: AnthropicLike | null = null;
async function getClient(): Promise<AnthropicLike> {
if (_anthropic) return _anthropic;
// @ts-expect-error -- TS6305: integrations-anthropic-ai exports src directly; project-reference build not required at runtime
const mod = (await import("@workspace/integrations-anthropic-ai")) as { anthropic: AnthropicLike };
_anthropic = mod.anthropic;
return _anthropic;
}
// ── AgentService ─────────────────────────────────────────────────────────────
export class AgentService {
readonly evalModel: string;
readonly workModel: string;
readonly stubMode: boolean = STUB_MODE;
constructor(config?: AgentConfig) {
this.evalModel = config?.evalModel ?? process.env.EVAL_MODEL ?? "claude-haiku-4-5";
this.workModel = config?.workModel ?? process.env.WORK_MODEL ?? "claude-sonnet-4-6";
this.evalModel = config?.evalModel ?? process.env["EVAL_MODEL"] ?? "claude-haiku-4-5";
this.workModel = config?.workModel ?? process.env["WORK_MODEL"] ?? "claude-sonnet-4-6";
}
async evaluateRequest(requestText: string): Promise<EvalResult> {
const message = await anthropic.messages.create({
if (STUB_MODE) {
// Simulate a short eval delay so state-machine tests are realistic
await new Promise((r) => setTimeout(r, 300));
return { ...STUB_EVAL };
}
const client = await getClient();
const message = await client.messages.create({
model: this.evalModel,
max_tokens: 8192,
system: `You are Timmy, an AI agent gatekeeper. Evaluate whether a request is acceptable to act on.
@@ -45,10 +109,10 @@ Respond ONLY with valid JSON: {"accepted": true, "reason": "..."} or {"accepted"
let parsed: { accepted: boolean; reason: string };
try {
const raw = block.text.replace(/^```(?:json)?\s*/i, "").replace(/\s*```$/, "").trim();
const raw = block.text!.replace(/^```(?:json)?\s*/i, "").replace(/\s*```$/, "").trim();
parsed = JSON.parse(raw) as { accepted: boolean; reason: string };
} catch {
throw new Error(`Failed to parse eval JSON: ${block.text}`);
throw new Error(`Failed to parse eval JSON: ${block.text!}`);
}
return {
@@ -60,7 +124,13 @@ Respond ONLY with valid JSON: {"accepted": true, "reason": "..."} or {"accepted"
}
async executeWork(requestText: string): Promise<WorkResult> {
const message = await anthropic.messages.create({
if (STUB_MODE) {
await new Promise((r) => setTimeout(r, 500));
return { result: STUB_RESULT, inputTokens: 0, outputTokens: 0 };
}
const client = await getClient();
const message = await client.messages.create({
model: this.workModel,
max_tokens: 8192,
system: `You are Timmy, a capable AI agent. A user has paid for you to handle their request.
@@ -74,11 +144,61 @@ Fulfill it thoroughly and helpfully. Be concise yet complete.`,
}
return {
result: block.text,
result: block.text!,
inputTokens: message.usage.input_tokens,
outputTokens: message.usage.output_tokens,
};
}
/**
* Streaming variant of executeWork (#3). Calls onChunk for every text delta.
* In stub mode, emits the canned response word-by-word to exercise the SSE
* path end-to-end without a real Anthropic key.
*/
async executeWorkStreaming(
requestText: string,
onChunk: (delta: string) => void,
): Promise<WorkResult> {
if (STUB_MODE) {
const words = STUB_RESULT.split(" ");
for (const word of words) {
const delta = word + " ";
onChunk(delta);
await new Promise((r) => setTimeout(r, 40));
}
return { result: STUB_RESULT, inputTokens: 0, outputTokens: 0 };
}
const client = await getClient();
let fullText = "";
let inputTokens = 0;
let outputTokens = 0;
const stream = client.messages.stream({
model: this.workModel,
max_tokens: 8192,
system: `You are Timmy, a capable AI agent. A user has paid for you to handle their request.
Fulfill it thoroughly and helpfully. Be concise yet complete.`,
messages: [{ role: "user", content: requestText }],
});
for await (const event of stream) {
if (
event.type === "content_block_delta" &&
event.delta?.type === "text_delta"
) {
const delta = event.delta!.text ?? "";
fullText += delta;
onChunk(delta);
} else if (event.type === "message_delta" && event.usage) {
outputTokens = event.usage!.output_tokens;
} else if (event.type === "message_start" && event.message?.usage) {
inputTokens = event.message!.usage.input_tokens;
}
}
return { result: fullText, inputTokens, outputTokens };
}
}
export const agentService = new AgentService();

View File

@@ -1,3 +1,7 @@
import { makeLogger } from "./logger.js";
const logger = makeLogger("btc-oracle");
const COINGECKO_URL =
"https://api.coingecko.com/api/v3/simple/price?ids=bitcoin&vs_currencies=usd";
@@ -42,7 +46,10 @@ export async function getBtcPriceUsd(): Promise<number> {
return price;
} catch (err) {
const fb = fallbackPrice();
console.warn(`[btc-oracle] Price fetch failed (using $${fb} fallback):`, err);
logger.warn("price fetch failed using fallback", {
fallback_usd: fb,
error: err instanceof Error ? err.message : String(err),
});
return fb;
}
}

View File

@@ -0,0 +1,34 @@
import { EventEmitter } from "events";

// Typed in-process event bus. Every event is published on the single "bus"
// channel; consumers discriminate on the `type` field of BusEvent.

// Events emitted during the per-job (Mode 1) lifecycle.
export type JobEvent =
  | { type: "job:state"; jobId: string; state: string }
  | { type: "job:paid"; jobId: string; invoiceType: "eval" | "work" }
  | { type: "job:completed"; jobId: string; result: string }
  | { type: "job:failed"; jobId: string; reason: string };

// Events emitted during the session (Mode 2) lifecycle.
export type SessionEvent =
  | { type: "session:state"; sessionId: string; state: string }
  | { type: "session:paid"; sessionId: string; amountSats: number }
  | { type: "session:balance"; sessionId: string; balanceSats: number };

export type BusEvent = JobEvent | SessionEvent;

class EventBus extends EventEmitter {
  // Overload narrows `emit` so the "bus" channel only accepts BusEvent
  // payloads; the implementation delegates to the untyped EventEmitter.
  emit(event: "bus", data: BusEvent): boolean;
  emit(event: string, ...args: unknown[]): boolean {
    return super.emit(event, ...args);
  }

  // Matching overload for subscribers: listeners on "bus" receive BusEvent.
  on(event: "bus", listener: (data: BusEvent) => void): this;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  on(event: string, listener: (...args: any[]) => void): this {
    return super.on(event, listener);
  }

  // Convenience wrapper: publish a typed event on the shared "bus" channel.
  publish(data: BusEvent): void {
    this.emit("bus", data);
  }
}

// Process-wide singleton bus.
export const eventBus = new EventBus();
// Raised from Node's default of 10 — presumably each connected client adds a
// listener, so many concurrent subscribers are expected. TODO confirm.
eventBus.setMaxListeners(256);

View File

@@ -0,0 +1,45 @@
// Cap on retained samples per route; oldest samples are evicted first.
const MAX_SAMPLES = 1_000;

export interface BucketStats {
  p50: number | null;
  p95: number | null;
  count: number;
}

/**
 * Per-route latency tracker with bounded memory. Keeps a sliding window of
 * the most recent MAX_SAMPLES durations per route and reports percentiles
 * over that window.
 */
export class LatencyHistogram {
  private readonly buckets = new Map<string, number[]>();

  /** Record one duration sample, evicting the oldest once the cap is hit. */
  record(route: string, durationMs: number): void {
    const samples = this.buckets.get(route) ?? [];
    if (!this.buckets.has(route)) {
      this.buckets.set(route, samples);
    }
    if (samples.length >= MAX_SAMPLES) {
      samples.shift();
    }
    samples.push(durationMs);
  }

  /**
   * Percentile over the route's current window, or null when no samples
   * exist. Uses floor-rank selection on a sorted copy (the stored window
   * itself is never reordered).
   */
  percentile(route: string, pct: number): number | null {
    const samples = this.buckets.get(route);
    if (samples === undefined || samples.length === 0) {
      return null;
    }
    const ordered = [...samples].sort((x, y) => x - y);
    const rank = Math.min(
      Math.floor((pct / 100) * ordered.length),
      ordered.length - 1,
    );
    return ordered[rank] ?? null;
  }

  /** p50/p95/count for every route seen so far. */
  snapshot(): Record<string, BucketStats> {
    const result: Record<string, BucketStats> = {};
    for (const [route, samples] of this.buckets) {
      result[route] = {
        p50: this.percentile(route, 50),
        p95: this.percentile(route, 95),
        count: samples.length,
      };
    }
    return result;
  }
}

// Process-wide singleton used by the response-time middleware.
export const latencyHistogram = new LatencyHistogram();

View File

@@ -1,4 +1,7 @@
import { randomBytes } from "crypto";
import { makeLogger } from "./logger.js";
const logger = makeLogger("lnbits");
export interface LNbitsInvoice {
paymentHash: string;
@@ -22,7 +25,7 @@ export class LNbitsService {
this.apiKey = config?.apiKey ?? process.env.LNBITS_API_KEY ?? "";
this.stubMode = !this.url || !this.apiKey;
if (this.stubMode) {
console.warn("[LNbitsService] No LNBITS_URL/LNBITS_API_KEY — running in STUB mode. Invoices are simulated.");
logger.warn("no LNBITS_URL/LNBITS_API_KEY — running in STUB mode", { stub: true });
}
}
@@ -32,7 +35,7 @@ export class LNbitsService {
if (this.stubMode) {
const paymentHash = randomBytes(32).toString("hex");
const paymentRequest = `lnbcrt${amountSats}u1stub_${paymentHash.slice(0, 16)}`;
console.log(`[stub] Created invoice: ${amountSats} sats — "${memo}" — hash=${paymentHash}`);
logger.info("stub invoice created", { amountSats, memo, paymentHash });
return { paymentHash, paymentRequest };
}
@@ -113,7 +116,7 @@ export class LNbitsService {
async payInvoice(bolt11: string): Promise<string> {
if (this.stubMode) {
const paymentHash = randomBytes(32).toString("hex");
console.log(`[stub] Paid outgoing invoice — fake hash=${paymentHash}`);
logger.info("stub outgoing payment", { paymentHash, invoiceType: "outbound" });
return paymentHash;
}
@@ -140,7 +143,7 @@ export class LNbitsService {
throw new Error("stubMarkPaid called on a real LNbitsService instance");
}
stubPaidInvoices.add(paymentHash);
console.log(`[stub] Marked invoice paid: hash=${paymentHash}`);
logger.info("stub invoice marked paid", { paymentHash, invoiceType: "inbound" });
}
// ── Private helpers ──────────────────────────────────────────────────────

View File

@@ -0,0 +1,32 @@
/** Severity levels emitted by the structured logger, lowest to highest. */
export type LogLevel = "debug" | "info" | "warn" | "error";

/** Arbitrary structured fields merged into a log line (e.g. jobId, ip). */
export interface LogContext {
  [key: string]: unknown;
}
/**
 * Serializes one structured log line as single-line JSON and writes it to
 * stderr for warn/error severity, stdout otherwise. Context fields are
 * spread last, so callers may intentionally override the standard fields.
 */
function emit(level: LogLevel, component: string, message: string, ctx?: LogContext): void {
  const payload: Record<string, unknown> = {
    timestamp: new Date().toISOString(),
    level,
    component,
    message,
    ...ctx,
  };
  const serialized = JSON.stringify(payload);
  const isHighSeverity = level === "error" || level === "warn";
  (isHighSeverity ? console.error : console.log)(serialized);
}
export function makeLogger(component: string) {
return {
debug: (message: string, ctx?: LogContext) => emit("debug", component, message, ctx),
info: (message: string, ctx?: LogContext) => emit("info", component, message, ctx),
warn: (message: string, ctx?: LogContext) => emit("warn", component, message, ctx),
error: (message: string, ctx?: LogContext) => emit("error", component, message, ctx),
};
}
export const rootLogger = makeLogger("server");

View File

@@ -0,0 +1,118 @@
import { db, jobs, invoices } from "@workspace/db";
import { sql } from "drizzle-orm";
import { latencyHistogram, type BucketStats } from "./histogram.js";
/** Job counts rolled up into operational buckets (see MetricsService.snapshot). */
export interface JobStateCounts {
  // awaiting_eval_payment + evaluating, combined below in snapshot().
  awaiting_eval: number;
  // awaiting_work_payment + executing, combined below in snapshot().
  awaiting_work: number;
  complete: number;
  rejected: number;
  failed: number;
}

/** Full payload returned by MetricsService.snapshot() (served at GET /metrics). */
export interface MetricsSnapshot {
  // Seconds since this module was loaded (approximates process uptime).
  uptime_s: number;
  jobs: {
    total: number;
    by_state: JobStateCounts;
  };
  invoices: {
    total: number;
    paid: number;
    // paid / total, or null when no invoices exist yet.
    conversion_rate: number | null;
  };
  earnings: {
    total_sats: number;
  };
  latency: {
    // AI evaluation-phase latency stats; null if that bucket was never recorded.
    eval_phase: BucketStats | null;
    // AI work-phase latency stats; null if that bucket was never recorded.
    work_phase: BucketStats | null;
    // Per-HTTP-route latency stats (every bucket except the two AI phases).
    routes: Record<string, BucketStats>;
  };
}

// Module-load timestamp used as the uptime baseline.
const START_TIME = Date.now();
/**
 * Aggregates operational metrics — job states, invoice conversion, lifetime
 * earnings, and latency percentiles — into a single MetricsSnapshot.
 */
export class MetricsService {
  /**
   * Collects one point-in-time snapshot. Issues three independent DB
   * aggregations in parallel, then merges in the in-process latency
   * histogram (which resets on restart, unlike the DB-backed counters).
   */
  async snapshot(): Promise<MetricsSnapshot> {
    const [jobsByState, invoiceCounts, earningsRow] = await Promise.all([
      // Row count per raw DB job state.
      db
        .select({
          state: jobs.state,
          count: sql<number>`cast(count(*) as int)`,
        })
        .from(jobs)
        .groupBy(jobs.state),
      // Invoice totals: overall count plus how many are marked paid.
      db
        .select({
          total: sql<number>`cast(count(*) as int)`,
          paid: sql<number>`cast(sum(case when paid then 1 else 0 end) as int)`,
        })
        .from(invoices),
      // Lifetime earnings in sats across all jobs (coalesced to 0 when empty).
      db
        .select({
          total_sats: sql<number>`cast(coalesce(sum(actual_amount_sats), 0) as int)`,
        })
        .from(jobs),
    ]);

    // Group raw DB states into operational state keys
    const rawCounts: Record<string, number> = {};
    let jobsTotal = 0;
    for (const row of jobsByState) {
      const n = Number(row.count);
      rawCounts[row.state] = (rawCounts[row.state] ?? 0) + n;
      jobsTotal += n;
    }
    const byState: JobStateCounts = {
      awaiting_eval: (rawCounts["awaiting_eval_payment"] ?? 0) + (rawCounts["evaluating"] ?? 0),
      awaiting_work: (rawCounts["awaiting_work_payment"] ?? 0) + (rawCounts["executing"] ?? 0),
      complete: rawCounts["complete"] ?? 0,
      rejected: rawCounts["rejected"] ?? 0,
      failed: rawCounts["failed"] ?? 0,
    };

    // Fallback covers an empty result set; NOTE(review): when the invoices
    // table is empty, sum(...) yields NULL and Number(null) coerces to 0,
    // so invPaid stays well-defined either way.
    const invRow = invoiceCounts[0] ?? { total: 0, paid: 0 };
    const invTotal = Number(invRow.total);
    const invPaid = Number(invRow.paid);
    const conversionRate = invTotal > 0 ? invPaid / invTotal : null;

    const totalSats = Number(earningsRow[0]?.total_sats ?? 0);

    // Split histogram buckets: the two AI phases are reported separately
    // from per-route HTTP latency.
    const allRoutes = latencyHistogram.snapshot();
    const evalPhase = allRoutes["eval_phase"] ?? null;
    const workPhase = allRoutes["work_phase"] ?? null;
    const routeLatency: Record<string, BucketStats> = {};
    for (const [key, stats] of Object.entries(allRoutes)) {
      if (key !== "eval_phase" && key !== "work_phase") {
        routeLatency[key] = stats;
      }
    }

    return {
      uptime_s: Math.floor((Date.now() - START_TIME) / 1000),
      jobs: {
        total: jobsTotal,
        by_state: byState,
      },
      invoices: {
        total: invTotal,
        paid: invPaid,
        conversion_rate: conversionRate,
      },
      earnings: {
        total_sats: totalSats,
      },
      latency: {
        eval_phase: evalPhase,
        work_phase: workPhase,
        routes: routeLatency,
      },
    };
  }
}

/** Shared singleton used by the metrics router. */
export const metricsService = new MetricsService();

View File

@@ -1,6 +1,9 @@
import { generateKeyPairSync } from "crypto";
import { db, bootstrapJobs } from "@workspace/db";
import { eq } from "drizzle-orm";
import { makeLogger } from "./logger.js";
const logger = makeLogger("provisioner");
const DO_API_BASE = "https://api.digitalocean.com/v2";
const TS_API_BASE = "https://api.tailscale.com/api/v2";
@@ -458,9 +461,7 @@ export class ProvisionerService {
this.tsTailnet = process.env.TAILSCALE_TAILNET ?? "";
this.stubMode = !this.doToken;
if (this.stubMode) {
console.warn(
"[ProvisionerService] No DO_API_TOKEN — running in STUB mode. Provisioning is simulated.",
);
logger.warn("no DO_API_TOKEN — running in STUB mode", { stub: true });
}
}
@@ -477,7 +478,7 @@ export class ProvisionerService {
}
} catch (err) {
const message = err instanceof Error ? err.message : "Provisioning failed";
console.error(`[ProvisionerService] Error for job ${bootstrapJobId}:`, message);
logger.error("provisioning failed", { bootstrapJobId, error: message });
await db
.update(bootstrapJobs)
.set({ state: "failed", errorMessage: message, updatedAt: new Date() })
@@ -486,7 +487,7 @@ export class ProvisionerService {
}
private async stubProvision(jobId: string): Promise<void> {
console.log(`[stub] Simulating provisioning for bootstrap job ${jobId}...`);
logger.info("stub provisioning started", { bootstrapJobId: jobId });
const { privateKey } = generateSshKeypair();
await new Promise((r) => setTimeout(r, 2000));
const fakeDropletId = String(Math.floor(Math.random() * 900_000_000 + 100_000_000));
@@ -502,11 +503,11 @@ export class ProvisionerService {
updatedAt: new Date(),
})
.where(eq(bootstrapJobs.id, jobId));
console.log(`[stub] Bootstrap job ${jobId} marked ready with fake credentials.`);
logger.info("stub provisioning complete", { bootstrapJobId: jobId });
}
private async realProvision(jobId: string): Promise<void> {
console.log(`[ProvisionerService] Provisioning real node for job ${jobId}...`);
logger.info("real provisioning started", { bootstrapJobId: jobId });
// 1. SSH keypair (pure node:crypto)
const { publicKey, privateKey } = generateSshKeypair();
@@ -525,7 +526,7 @@ export class ProvisionerService {
try {
tailscaleAuthKey = await getTailscaleAuthKey(this.tsApiKey, this.tsTailnet);
} catch (err) {
console.warn("[ProvisionerService] Tailscale key failed — continuing without:", err);
logger.warn("Tailscale key failed — continuing without Tailscale", { error: String(err) });
}
}
@@ -534,7 +535,7 @@ export class ProvisionerService {
if (this.doVolumeGb > 0) {
const volName = `timmy-data-${jobId.slice(0, 8)}`;
volumeId = await createVolume(volName, this.doVolumeGb, this.doRegion, this.doToken);
console.log(`[ProvisionerService] Volume created: id=${volumeId} (${this.doVolumeGb} GB)`);
logger.info("block volume created", { volumeId, sizeGb: this.doVolumeGb });
}
// 5. Create droplet
@@ -556,11 +557,11 @@ export class ProvisionerService {
dropletPayload,
);
const dropletId = dropletData.droplet.id;
console.log(`[ProvisionerService] Droplet created: id=${dropletId}`);
logger.info("droplet created", { bootstrapJobId: jobId, dropletId });
// 6. Poll for public IP (up to 2 min)
const nodeIp = await pollDropletIp(dropletId, this.doToken, 120_000);
console.log(`[ProvisionerService] Node IP: ${nodeIp ?? "(not yet assigned)"}`);
logger.info("node ip assigned", { bootstrapJobId: jobId, nodeIp: nodeIp ?? "(not yet assigned)" });
// 7. Tailscale hostname
const tailscaleHostname =
@@ -589,7 +590,7 @@ export class ProvisionerService {
})
.where(eq(bootstrapJobs.id, jobId));
console.log(`[ProvisionerService] Bootstrap job ${jobId} ready.`);
logger.info("real provisioning complete", { bootstrapJobId: jobId });
}
}

View File

@@ -0,0 +1,46 @@
import { rateLimit, type Options } from "express-rate-limit";
import { makeLogger } from "./logger.js";
const logger = makeLogger("rate-limiter");
/**
 * Reads an integer from the environment. Falls back when the variable is
 * unset, empty, or does not parse as a base-10 integer.
 */
function envInt(key: string, fallback: number): number {
  const raw = process.env[key];
  if (!raw) return fallback;
  const parsed = parseInt(raw, 10);
  return Number.isNaN(parsed) ? fallback : parsed;
}
/**
 * Builds an express-rate-limit middleware that returns a JSON 429 body and
 * logs a structured warning for every rejected request. When overrideKey is
 * supplied, the max is read from that environment variable at module load,
 * falling back to the given default.
 */
function limiter(windowMs: number, max: number, overrideKey?: string) {
  const effectiveMax = overrideKey ? envInt(overrideKey, max) : max;
  const retryAfterSeconds = Math.ceil(windowMs / 1000);
  return rateLimit({
    windowMs,
    max: effectiveMax,
    standardHeaders: "draft-7",
    legacyHeaders: false,
    handler: (req, res) => {
      // Prefer the first hop in X-Forwarded-For (original client behind a proxy).
      const forwarded = (req.headers["x-forwarded-for"] as string | undefined)
        ?.split(",")[0]
        ?.trim();
      const ip = forwarded ?? req.socket.remoteAddress ?? "unknown";
      logger.warn("rate limit hit", {
        route: req.path,
        method: req.method,
        ip,
        retry_after_s: retryAfterSeconds,
      });
      res.status(429).json({
        error: "rate_limited",
        message: "Too many requests — please slow down.",
        retryAfterSeconds,
      });
    },
  } satisfies Partial<Options>);
}
// Route-scoped limiters. Each max is resolved once at module load and can be
// tuned per deployment through the named environment variable.
// POST /api/jobs — 30 req/min per IP (configurable via RATE_LIMIT_JOBS)
export const jobsLimiter = limiter(60_000, 30, "RATE_LIMIT_JOBS");

// POST /api/sessions — 10 req/min per IP (configurable via RATE_LIMIT_SESSIONS)
export const sessionsLimiter = limiter(60_000, 10, "RATE_LIMIT_SESSIONS");

// POST /api/bootstrap — 3 req/hour per IP (configurable via RATE_LIMIT_BOOTSTRAP)
export const bootstrapLimiter = limiter(60 * 60_000, 3, "RATE_LIMIT_BOOTSTRAP");

View File

@@ -0,0 +1,55 @@
import { PassThrough } from "stream";
interface StreamEntry {
  stream: PassThrough;
  createdAt: number;
}

/**
 * In-memory registry of live PassThrough token streams, keyed by job id.
 * Background work runners write token deltas into a job's stream; the SSE
 * route attaches to it to relay tokens to clients. Entries self-clean when
 * their stream closes and are additionally evicted after a 5-minute TTL.
 */
class StreamRegistry {
  private readonly streams = new Map<string, StreamEntry>();
  // Entries older than this are destroyed on the next register() call.
  private readonly TTL_MS = 5 * 60 * 1000;

  /**
   * Creates (or replaces) the stream slot for a job and returns the new
   * stream. Any previous stream for the same job id is destroyed.
   */
  register(jobId: string): PassThrough {
    const existing = this.streams.get(jobId);
    if (existing) {
      existing.stream.destroy();
    }
    const stream = new PassThrough();
    this.streams.set(jobId, { stream, createdAt: Date.now() });
    stream.on("close", () => {
      // BUGFIX: only remove the entry if it still refers to THIS stream.
      // destroy() above emits "close" asynchronously, so a replaced stream's
      // handler would otherwise delete the fresh slot that register() just
      // created for the same job id, losing the new stream.
      if (this.streams.get(jobId)?.stream === stream) {
        this.streams.delete(jobId);
      }
    });
    this.evictExpired();
    return stream;
  }

  /** Returns the live stream for a job, or null when none is registered. */
  get(jobId: string): PassThrough | null {
    return this.streams.get(jobId)?.stream ?? null;
  }

  /** Writes a token delta to the job's stream; silently no-ops when absent. */
  write(jobId: string, chunk: string): void {
    this.streams.get(jobId)?.stream.write(chunk);
  }

  /** Gracefully ends the job's stream (consumers see "end") and frees the slot. */
  end(jobId: string): void {
    const entry = this.streams.get(jobId);
    if (entry) {
      entry.stream.end();
      this.streams.delete(jobId);
    }
  }

  /** Destroys and removes entries past the TTL; called opportunistically from register(). */
  private evictExpired(): void {
    const now = Date.now();
    for (const [id, entry] of this.streams.entries()) {
      if (now - entry.createdAt > this.TTL_MS) {
        entry.stream.destroy();
        this.streams.delete(id);
      }
    }
  }
}

/** Shared singleton used by the jobs router and background work runners. */
export const streamRegistry = new StreamRegistry();

View File

@@ -0,0 +1,31 @@
import type { Request, Response, NextFunction } from "express";
import { makeLogger } from "../lib/logger.js";
import { latencyHistogram } from "../lib/histogram.js";
const logger = makeLogger("http");

/**
 * Express middleware that records per-route latency into the shared
 * histogram and emits one structured "request" log line when the response
 * finishes. Prefers the matched route pattern (req.route.path) over the
 * raw path so histogram keys stay low-cardinality.
 */
export function responseTimeMiddleware(req: Request, res: Response, next: NextFunction): void {
  const begin = Date.now();

  res.on("finish", () => {
    const elapsedMs = Date.now() - begin;
    const matchedRoute = req.route?.path as string | undefined;
    latencyHistogram.record(`${req.method} ${matchedRoute ?? req.path}`, elapsedMs);

    // First hop of X-Forwarded-For is the original client when proxied.
    const forwarded = (req.headers["x-forwarded-for"] as string | undefined)
      ?.split(",")[0]
      ?.trim();
    logger.info("request", {
      method: req.method,
      path: req.path,
      route: matchedRoute ?? null,
      status: res.statusCode,
      duration_ms: elapsedMs,
      ip: forwarded ?? req.socket.remoteAddress ?? null,
    });
  });

  next();
}

View File

@@ -5,6 +5,9 @@ import { eq, and } from "drizzle-orm";
import { lnbitsService } from "../lib/lnbits.js";
import { pricingService } from "../lib/pricing.js";
import { provisionerService } from "../lib/provisioner.js";
import { makeLogger } from "../lib/logger.js";
const logger = makeLogger("bootstrap");
const router = Router();
@@ -44,7 +47,7 @@ async function advanceBootstrapJob(job: BootstrapJob): Promise<BootstrapJob | nu
return getBootstrapJobById(job.id);
}
console.log(`[bootstrap] Payment confirmed for ${job.id} — starting provisioning`);
logger.info("bootstrap payment confirmed — starting provisioning", { bootstrapJobId: job.id });
// Fire-and-forget: provisioner updates DB when done
void provisionerService.provision(job.id);

View File

@@ -1,8 +1,10 @@
import { Router, type Request, type Response } from "express";
import { RunDemoQueryParams } from "@workspace/api-zod";
import { agentService } from "../lib/agent.js";
import { makeLogger } from "../lib/logger.js";
const router = Router();
const logger = makeLogger("demo");
const RATE_LIMIT_MAX = 5;
const RATE_LIMIT_WINDOW_MS = 60 * 60 * 1000;
@@ -35,6 +37,7 @@ router.get("/demo", async (req: Request, res: Response) => {
const { allowed, resetAt } = checkRateLimit(ip);
if (!allowed) {
const secsUntilReset = Math.ceil((resetAt - Date.now()) / 1000);
logger.warn("demo rate limited", { ip, retry_after_s: secsUntilReset });
res.status(429).json({
error: `Rate limit exceeded. Try again in ${secsUntilReset}s (5 requests per hour per IP).`,
});
@@ -52,11 +55,14 @@ router.get("/demo", async (req: Request, res: Response) => {
}
const { request } = parseResult.data;
logger.info("demo request received", { ip });
try {
const { result } = await agentService.executeWork(request);
res.json({ result });
} catch (err) {
const message = err instanceof Error ? err.message : "Agent error";
logger.error("demo agent error", { ip, error: message });
res.status(500).json({ error: message });
}
});

View File

@@ -1,11 +1,25 @@
import { Router, type IRouter } from "express";
import { HealthCheckResponse } from "@workspace/api-zod";
import { Router, type IRouter, type Request, type Response } from "express";
import { db, jobs } from "@workspace/db";
import { sql } from "drizzle-orm";
import { makeLogger } from "../lib/logger.js";
const router: IRouter = Router();
const logger = makeLogger("health");
router.get("/healthz", (_req, res) => {
const data = HealthCheckResponse.parse({ status: "ok" });
res.json(data);
const START_TIME = Date.now();
router.get("/healthz", async (_req: Request, res: Response) => {
try {
const rows = await db.select({ total: sql<number>`cast(count(*) as int)` }).from(jobs);
const jobsTotal = Number(rows[0]?.total ?? 0);
const uptimeS = Math.floor((Date.now() - START_TIME) / 1000);
res.json({ status: "ok", uptime_s: uptimeS, jobs_total: jobsTotal });
} catch (err) {
const message = err instanceof Error ? err.message : "Health check failed";
logger.error("healthz db query failed", { error: message });
const uptimeS = Math.floor((Date.now() - START_TIME) / 1000);
res.status(503).json({ status: "error", uptime_s: uptimeS, error: message });
}
});
export default router;

View File

@@ -8,10 +8,12 @@ import devRouter from "./dev.js";
import testkitRouter from "./testkit.js";
import uiRouter from "./ui.js";
import nodeDiagnosticsRouter from "./node-diagnostics.js";
import metricsRouter from "./metrics.js";
const router: IRouter = Router();
router.use(healthRouter);
router.use(metricsRouter);
router.use(jobsRouter);
router.use(bootstrapRouter);
router.use(sessionsRouter);

View File

@@ -6,6 +6,13 @@ import { CreateJobBody, GetJobParams } from "@workspace/api-zod";
import { lnbitsService } from "../lib/lnbits.js";
import { agentService } from "../lib/agent.js";
import { pricingService } from "../lib/pricing.js";
import { jobsLimiter } from "../lib/rate-limiter.js";
import { eventBus } from "../lib/event-bus.js";
import { streamRegistry } from "../lib/stream-registry.js";
import { makeLogger } from "../lib/logger.js";
import { latencyHistogram } from "../lib/histogram.js";
const logger = makeLogger("jobs");
const router = Router();
@@ -24,8 +31,18 @@ async function getInvoiceById(id: string) {
* return immediately with "evaluating" state instead of blocking 5-8 seconds.
*/
async function runEvalInBackground(jobId: string, request: string): Promise<void> {
const evalStart = Date.now();
try {
const evalResult = await agentService.evaluateRequest(request);
latencyHistogram.record("eval_phase", Date.now() - evalStart);
logger.info("eval result", {
jobId,
accepted: evalResult.accepted,
reason: evalResult.reason,
inputTokens: evalResult.inputTokens,
outputTokens: evalResult.outputTokens,
});
if (evalResult.accepted) {
const inputEst = pricingService.estimateInputTokens(request);
@@ -65,11 +82,13 @@ async function runEvalInBackground(jobId: string, request: string): Promise<void
})
.where(eq(jobs.id, jobId));
});
eventBus.publish({ type: "job:state", jobId, state: "awaiting_work_payment" });
} else {
await db
.update(jobs)
.set({ state: "rejected", rejectionReason: evalResult.reason, updatedAt: new Date() })
.where(eq(jobs.id, jobId));
eventBus.publish({ type: "job:state", jobId, state: "rejected" });
}
} catch (err) {
const message = err instanceof Error ? err.message : "Evaluation error";
@@ -77,15 +96,25 @@ async function runEvalInBackground(jobId: string, request: string): Promise<void
.update(jobs)
.set({ state: "failed", errorMessage: message, updatedAt: new Date() })
.where(eq(jobs.id, jobId));
eventBus.publish({ type: "job:failed", jobId, reason: message });
}
}
/**
* Runs the AI work execution in a background task so HTTP polls return fast.
* Uses streaming so any connected SSE client receives tokens in real time (#3).
*/
async function runWorkInBackground(jobId: string, request: string, workAmountSats: number, btcPriceUsd: number | null): Promise<void> {
const workStart = Date.now();
try {
const workResult = await agentService.executeWork(request);
eventBus.publish({ type: "job:state", jobId, state: "executing" });
const workResult = await agentService.executeWorkStreaming(request, (delta) => {
streamRegistry.write(jobId, delta);
});
streamRegistry.end(jobId);
latencyHistogram.record("work_phase", Date.now() - workStart);
const actualCostUsd = pricingService.calculateActualCostUsd(
workResult.inputTokens,
@@ -112,12 +141,24 @@ async function runWorkInBackground(jobId: string, request: string, workAmountSat
updatedAt: new Date(),
})
.where(eq(jobs.id, jobId));
logger.info("work completed", {
jobId,
inputTokens: workResult.inputTokens,
outputTokens: workResult.outputTokens,
actualAmountSats,
refundAmountSats,
refundState,
});
eventBus.publish({ type: "job:completed", jobId, result: workResult.result });
} catch (err) {
const message = err instanceof Error ? err.message : "Execution error";
streamRegistry.end(jobId);
await db
.update(jobs)
.set({ state: "failed", errorMessage: message, updatedAt: new Date() })
.where(eq(jobs.id, jobId));
eventBus.publish({ type: "job:failed", jobId, reason: message });
}
}
@@ -149,6 +190,10 @@ async function advanceJob(job: Job): Promise<Job | null> {
if (!advanced) return getJobById(job.id);
logger.info("invoice paid", { jobId: job.id, invoiceType: "eval", paymentHash: evalInvoice.paymentHash });
eventBus.publish({ type: "job:paid", jobId: job.id, invoiceType: "eval" });
eventBus.publish({ type: "job:state", jobId: job.id, state: "evaluating" });
// Fire AI eval in background — poll returns immediately with "evaluating"
setImmediate(() => { void runEvalInBackground(job.id, job.request); });
@@ -177,6 +222,12 @@ async function advanceJob(job: Job): Promise<Job | null> {
if (!advanced) return getJobById(job.id);
logger.info("invoice paid", { jobId: job.id, invoiceType: "work", paymentHash: workInvoice.paymentHash });
eventBus.publish({ type: "job:paid", jobId: job.id, invoiceType: "work" });
// Register stream slot before firing background work so first tokens aren't lost
streamRegistry.register(job.id);
// Fire AI work in background — poll returns immediately with "executing"
setImmediate(() => { void runWorkInBackground(job.id, job.request, job.workAmountSats ?? 0, job.btcPriceUsd); });
@@ -188,7 +239,7 @@ async function advanceJob(job: Job): Promise<Job | null> {
// ── POST /jobs ────────────────────────────────────────────────────────────────
router.post("/jobs", async (req: Request, res: Response) => {
router.post("/jobs", jobsLimiter, async (req: Request, res: Response) => {
const parseResult = CreateJobBody.safeParse(req.body);
if (!parseResult.success) {
const issue = parseResult.error.issues[0];
@@ -221,6 +272,8 @@ router.post("/jobs", async (req: Request, res: Response) => {
await tx.update(jobs).set({ evalInvoiceId: invoiceId, updatedAt: new Date() }).where(eq(jobs.id, jobId));
});
logger.info("job created", { jobId, evalAmountSats: evalFee, stubMode: lnbitsService.stubMode });
res.status(201).json({
jobId,
evalInvoice: {
@@ -231,6 +284,7 @@ router.post("/jobs", async (req: Request, res: Response) => {
});
} catch (err) {
const message = err instanceof Error ? err.message : "Failed to create job";
logger.error("job creation failed", { error: message });
res.status(500).json({ error: message });
}
});
@@ -404,4 +458,130 @@ router.post("/jobs/:id/refund", async (req: Request, res: Response) => {
}
});
// ── GET /jobs/:id/stream ──────────────────────────────────────────────────────
// Server-Sent Events (#3): streams Claude token deltas in real time while the
// job is executing. If the job is already complete, sends the full result then
// closes. If the job isn't executing yet, waits up to 90 s for it to start.
router.get("/jobs/:id/stream", async (req: Request, res: Response) => {
  const paramResult = GetJobParams.safeParse(req.params);
  if (!paramResult.success) {
    res.status(400).json({ error: "Invalid job id" });
    return;
  }
  const { id } = paramResult.data;

  const job = await getJobById(id);
  if (!job) {
    res.status(404).json({ error: "Job not found" });
    return;
  }

  // Standard SSE headers; X-Accel-Buffering disables proxy response buffering.
  res.setHeader("Content-Type", "text/event-stream");
  res.setHeader("Cache-Control", "no-cache");
  res.setHeader("Connection", "keep-alive");
  res.setHeader("X-Accel-Buffering", "no");
  res.flushHeaders();

  const sendEvent = (event: string, data: unknown) => {
    res.write(`event: ${event}\ndata: ${JSON.stringify(data)}\n\n`);
  };

  // Job already complete — replay full result immediately
  if (job.state === "complete" && job.result) {
    sendEvent("token", { text: job.result });
    sendEvent("done", { jobId: id, state: "complete" });
    res.end();
    return;
  }
  if (job.state === "failed") {
    sendEvent("error", { jobId: id, message: job.errorMessage ?? "Job failed" });
    res.end();
    return;
  }

  // Job is executing or about to execute — pipe the live stream. Heartbeat
  // comments keep intermediaries from closing the idle connection.
  const sendHeartbeat = setInterval(() => {
    res.write(": heartbeat\n\n");
  }, 15_000);

  const cleanup = () => {
    clearInterval(sendHeartbeat);
  };
  req.on("close", cleanup);

  // ── Wait for stream slot (fixes #16 race condition) ──────────────────────
  // After the bus wait we re-check BOTH the stream registry AND the DB so we
  // handle: (a) job completed while we waited (stream already gone), (b) job
  // still executing but stream was registered after we first checked.
  let stream = streamRegistry.get(id);
  let currentJob = job;
  if (!stream) {
    await new Promise<void>((resolve) => {
      const busListener = (data: Parameters<typeof eventBus.publish>[0]) => {
        if ("jobId" in data && data.jobId === id) {
          clearTimeout(deadline);
          eventBus.off("bus", busListener);
          resolve();
        }
      };
      // 90 s timeout — generous enough for slow payment confirmations on
      // mainnet. BUGFIX: the listener must also be detached on timeout,
      // otherwise every timed-out SSE request leaks one bus subscription.
      const deadline = setTimeout(() => {
        eventBus.off("bus", busListener);
        resolve();
      }, 90_000);
      eventBus.on("bus", busListener);
    });
    // Refresh both stream slot and job state after waiting
    stream = streamRegistry.get(id);
    currentJob = (await getJobById(id)) ?? currentJob;
  }

  // ── Resolve: stream available ─────────────────────────────────────────────
  if (stream) {
    // Capture in a const so the event callbacks see a narrowed, stable ref.
    const live = stream;
    live.on("data", (chunk: Buffer) => {
      sendEvent("token", { text: chunk.toString("utf8") });
    });
    live.on("end", () => {
      sendEvent("done", { jobId: id, state: "complete" });
      res.end();
      cleanup();
    });
    live.on("error", (err: Error) => {
      sendEvent("error", { jobId: id, message: err.message });
      res.end();
      cleanup();
    });
    return;
  }

  // ── Resolve: job completed while we waited (stream already gone) ──────────
  if (currentJob.state === "complete" && currentJob.result) {
    sendEvent("token", { text: currentJob.result });
    sendEvent("done", { jobId: id, state: "complete" });
    res.end();
    cleanup();
    return;
  }
  if (currentJob.state === "failed") {
    sendEvent("error", { jobId: id, message: currentJob.errorMessage ?? "Job failed" });
    res.end();
    cleanup();
    return;
  }

  // ── Resolve: timeout with no activity — tell client to fall back to polling
  sendEvent("error", {
    jobId: id,
    message: "Stream timed out. Poll GET /api/jobs/:id for current state.",
  });
  res.end();
  cleanup();
});
export default router;

View File

@@ -0,0 +1,19 @@
import { Router, type Request, type Response } from "express";
import { metricsService } from "../lib/metrics.js";
import { makeLogger } from "../lib/logger.js";
const router = Router();
const logger = makeLogger("metrics");

/**
 * GET /metrics — returns the full operational metrics snapshot as JSON.
 * Collection failures (e.g. DB unreachable) are logged and reported as 500.
 */
router.get("/metrics", async (_req: Request, res: Response) => {
  try {
    res.json(await metricsService.snapshot());
  } catch (err) {
    const message = err instanceof Error ? err.message : "Failed to collect metrics";
    logger.error("snapshot failed", { error: message });
    res.status(500).json({ error: message });
  }
});

export default router;

View File

@@ -3,6 +3,8 @@ import { randomBytes, randomUUID } from "crypto";
import { db, sessions, sessionRequests, type Session } from "@workspace/db";
import { eq, and } from "drizzle-orm";
import { lnbitsService } from "../lib/lnbits.js";
import { sessionsLimiter } from "../lib/rate-limiter.js";
import { eventBus } from "../lib/event-bus.js";
import { agentService } from "../lib/agent.js";
import { pricingService } from "../lib/pricing.js";
import { getBtcPriceUsd, usdToSats } from "../lib/btc-oracle.js";
@@ -133,7 +135,7 @@ async function advanceTopup(session: Session): Promise<Session> {
// ── POST /sessions ─────────────────────────────────────────────────────────────
router.post("/sessions", async (req: Request, res: Response) => {
router.post("/sessions", sessionsLimiter, async (req: Request, res: Response) => {
const rawAmount = req.body?.amount_sats;
const amountSats = parseInt(String(rawAmount ?? ""), 10);
@@ -220,7 +222,7 @@ router.post("/sessions/:id/request", async (req: Request, res: Response) => {
}
try {
let session = await getSessionById(id);
const session = await getSessionById(id);
if (!session) { res.status(404).json({ error: "Session not found" }); return; }
// Auth

View File

@@ -9,6 +9,8 @@ const router = Router();
* BASE URL. Agents and testers can run the full test suite with one command:
*
* curl -s https://your-url.replit.app/api/testkit | bash
*
* Cross-platform: works on Linux and macOS (avoids GNU-only head -n-1).
*/
router.get("/testkit", (req: Request, res: Response) => {
const proto =
@@ -31,16 +33,17 @@ FAIL=0
SKIP=0
note() { echo " [\$1] \$2"; }
jq_field() { echo "\$1" | jq -r "\$2" 2>/dev/null || echo ""; }
sep() { echo; echo "=== $* ==="; }
sep() { echo; echo "=== $* ==="; }
# body_of: strip last line (HTTP status code) — works on GNU and BSD (macOS)
body_of() { echo "\$1" | sed '$d'; }
code_of() { echo "\$1" | tail -n1; }
# ---------------------------------------------------------------------------
# Test 1 — Health check
# ---------------------------------------------------------------------------
sep "Test 1 — Health check"
T1_RES=$(curl -s -w "\\n%{http_code}" "$BASE/api/healthz")
T1_BODY=$(echo "$T1_RES" | head -n-1)
T1_CODE=$(echo "$T1_RES" | tail -n1)
T1_BODY=$(body_of "$T1_RES"); T1_CODE=$(code_of "$T1_RES")
if [[ "$T1_CODE" == "200" ]] && [[ "$(echo "$T1_BODY" | jq -r '.status' 2>/dev/null)" == "ok" ]]; then
note PASS "HTTP 200, status=ok"
PASS=$((PASS+1))
@@ -56,8 +59,7 @@ sep "Test 2 — Create job"
T2_RES=$(curl -s -w "\\n%{http_code}" -X POST "$BASE/api/jobs" \\
-H "Content-Type: application/json" \\
-d '{"request":"Explain the Lightning Network in two sentences"}')
T2_BODY=$(echo "$T2_RES" | head -n-1)
T2_CODE=$(echo "$T2_RES" | tail -n1)
T2_BODY=$(body_of "$T2_RES"); T2_CODE=$(code_of "$T2_RES")
JOB_ID=$(echo "$T2_BODY" | jq -r '.jobId' 2>/dev/null || echo "")
EVAL_AMT=$(echo "$T2_BODY" | jq -r '.evalInvoice.amountSats' 2>/dev/null || echo "")
if [[ "$T2_CODE" == "201" && -n "$JOB_ID" && "$EVAL_AMT" == "10" ]]; then
@@ -73,8 +75,7 @@ fi
# ---------------------------------------------------------------------------
sep "Test 3 — Poll before payment"
T3_RES=$(curl -s -w "\\n%{http_code}" "$BASE/api/jobs/$JOB_ID")
T3_BODY=$(echo "$T3_RES" | head -n-1)
T3_CODE=$(echo "$T3_RES" | tail -n1)
T3_BODY=$(body_of "$T3_RES"); T3_CODE=$(code_of "$T3_RES")
STATE_T3=$(echo "$T3_BODY" | jq -r '.state' 2>/dev/null || echo "")
EVAL_AMT_ECHO=$(echo "$T3_BODY" | jq -r '.evalInvoice.amountSats' 2>/dev/null || echo "")
EVAL_HASH=$(echo "$T3_BODY" | jq -r '.evalInvoice.paymentHash' 2>/dev/null || echo "")
@@ -99,8 +100,7 @@ fi
sep "Test 4 — Pay eval invoice (stub)"
if [[ -n "$EVAL_HASH" && "$EVAL_HASH" != "null" ]]; then
T4_RES=$(curl -s -w "\\n%{http_code}" -X POST "$BASE/api/dev/stub/pay/$EVAL_HASH")
T4_BODY=$(echo "$T4_RES" | head -n-1)
T4_CODE=$(echo "$T4_RES" | tail -n1)
T4_BODY=$(body_of "$T4_RES"); T4_CODE=$(code_of "$T4_RES")
if [[ "$T4_CODE" == "200" ]] && [[ "$(echo "$T4_BODY" | jq -r '.ok' 2>/dev/null)" == "true" ]]; then
note PASS "Eval invoice marked paid"
PASS=$((PASS+1))
@@ -114,25 +114,32 @@ else
fi
# ---------------------------------------------------------------------------
# Test 5 — Poll after eval payment
# Test 5 — Poll after eval payment (with retry loop — real AI eval takes 25 s)
# ---------------------------------------------------------------------------
sep "Test 5 — Poll after eval (state advance)"
sleep 2
T5_RES=$(curl -s -w "\\n%{http_code}" "$BASE/api/jobs/$JOB_ID")
T5_BODY=$(echo "$T5_RES" | head -n-1)
T5_CODE=$(echo "$T5_RES" | tail -n1)
STATE_T5=$(echo "$T5_BODY" | jq -r '.state' 2>/dev/null || echo "")
WORK_AMT=$(echo "$T5_BODY" | jq -r '.workInvoice.amountSats' 2>/dev/null || echo "")
WORK_HASH=$(echo "$T5_BODY" | jq -r '.workInvoice.paymentHash' 2>/dev/null || echo "")
START_T5=$(date +%s)
T5_TIMEOUT=30
STATE_T5=""; WORK_AMT=""; WORK_HASH=""; T5_BODY=""; T5_CODE=""
while :; do
T5_RES=$(curl -s -w "\\n%{http_code}" "$BASE/api/jobs/$JOB_ID")
T5_BODY=$(body_of "$T5_RES"); T5_CODE=$(code_of "$T5_RES")
STATE_T5=$(echo "$T5_BODY" | jq -r '.state' 2>/dev/null || echo "")
WORK_AMT=$(echo "$T5_BODY" | jq -r '.workInvoice.amountSats' 2>/dev/null || echo "")
WORK_HASH=$(echo "$T5_BODY" | jq -r '.workInvoice.paymentHash' 2>/dev/null || echo "")
NOW_T5=$(date +%s); ELAPSED_T5=$((NOW_T5 - START_T5))
if [[ "$STATE_T5" == "awaiting_work_payment" || "$STATE_T5" == "rejected" ]]; then break; fi
if (( ELAPSED_T5 > T5_TIMEOUT )); then break; fi
sleep 2
done
if [[ "$T5_CODE" == "200" && "$STATE_T5" == "awaiting_work_payment" && -n "$WORK_AMT" && "$WORK_AMT" != "null" ]]; then
note PASS "state=awaiting_work_payment, workInvoice.amountSats=$WORK_AMT"
note PASS "state=awaiting_work_payment in $ELAPSED_T5 s, workInvoice.amountSats=$WORK_AMT"
PASS=$((PASS+1))
elif [[ "$T5_CODE" == "200" && "$STATE_T5" == "rejected" ]]; then
note PASS "Request correctly rejected by agent after eval"
note PASS "Request correctly rejected by agent after eval (in $ELAPSED_T5 s)"
PASS=$((PASS+1))
WORK_HASH=""
else
note FAIL "code=$T5_CODE state=$STATE_T5 body=$T5_BODY"
note FAIL "code=$T5_CODE state=$STATE_T5 body=$T5_BODY (after $ELAPSED_T5 s)"
FAIL=$((FAIL+1))
fi
@@ -142,8 +149,7 @@ fi
sep "Test 6 — Pay work invoice + get result"
if [[ "$STATE_T5" == "awaiting_work_payment" && -n "$WORK_HASH" && "$WORK_HASH" != "null" ]]; then
T6_PAY_RES=$(curl -s -w "\\n%{http_code}" -X POST "$BASE/api/dev/stub/pay/$WORK_HASH")
T6_PAY_BODY=$(echo "$T6_PAY_RES" | head -n-1)
T6_PAY_CODE=$(echo "$T6_PAY_RES" | tail -n1)
T6_PAY_BODY=$(body_of "$T6_PAY_RES"); T6_PAY_CODE=$(code_of "$T6_PAY_RES")
if [[ "$T6_PAY_CODE" != "200" ]] || [[ "$(echo "$T6_PAY_BODY" | jq -r '.ok' 2>/dev/null)" != "true" ]]; then
note FAIL "Work payment stub failed: code=$T6_PAY_CODE body=$T6_PAY_BODY"
FAIL=$((FAIL+1))
@@ -152,11 +158,10 @@ if [[ "$STATE_T5" == "awaiting_work_payment" && -n "$WORK_HASH" && "$WORK_HASH"
TIMEOUT=30
while :; do
T6_RES=$(curl -s -w "\\n%{http_code}" "$BASE/api/jobs/$JOB_ID")
T6_BODY=$(echo "$T6_RES" | head -n-1)
T6_BODY=$(body_of "$T6_RES")
STATE_T6=$(echo "$T6_BODY" | jq -r '.state' 2>/dev/null || echo "")
RESULT_T6=$(echo "$T6_BODY" | jq -r '.result' 2>/dev/null || echo "")
NOW_TS=$(date +%s)
ELAPSED=$((NOW_TS - START_TS))
NOW_TS=$(date +%s); ELAPSED=$((NOW_TS - START_TS))
if [[ "$STATE_T6" == "complete" && -n "$RESULT_T6" && "$RESULT_T6" != "null" ]]; then
note PASS "state=complete in $ELAPSED s"
echo " Result: \${RESULT_T6:0:200}..."
@@ -177,33 +182,13 @@ else
fi
# ---------------------------------------------------------------------------
# Test 7 — Demo endpoint
# ---------------------------------------------------------------------------
sep "Test 7 — Demo endpoint"
START_DEMO=$(date +%s)
T7_RES=$(curl -s -w "\\n%{http_code}" "$BASE/api/demo?request=What+is+a+satoshi")
T7_BODY=$(echo "$T7_RES" | head -n-1)
T7_CODE=$(echo "$T7_RES" | tail -n1)
END_DEMO=$(date +%s)
ELAPSED_DEMO=$((END_DEMO - START_DEMO))
RESULT_T7=$(echo "$T7_BODY" | jq -r '.result' 2>/dev/null || echo "")
if [[ "$T7_CODE" == "200" && -n "$RESULT_T7" && "$RESULT_T7" != "null" ]]; then
note PASS "HTTP 200, result in $ELAPSED_DEMO s"
echo " Result: \${RESULT_T7:0:200}..."
PASS=$((PASS+1))
else
note FAIL "code=$T7_CODE body=$T7_BODY"
FAIL=$((FAIL+1))
fi
# ---------------------------------------------------------------------------
# Test 8 — Input validation (4 sub-cases)
# Test 8 — Input validation (run BEFORE test 7 to avoid rate-limit interference)
# ---------------------------------------------------------------------------
sep "Test 8 — Input validation"
T8A_RES=$(curl -s -w "\\n%{http_code}" -X POST "$BASE/api/jobs" \\
-H "Content-Type: application/json" -d '{}')
T8A_BODY=$(echo "$T8A_RES" | head -n-1); T8A_CODE=$(echo "$T8A_RES" | tail -n1)
T8A_BODY=$(body_of "$T8A_RES"); T8A_CODE=$(code_of "$T8A_RES")
if [[ "$T8A_CODE" == "400" && -n "$(echo "$T8A_BODY" | jq -r '.error' 2>/dev/null)" ]]; then
note PASS "8a: Missing request body → HTTP 400"
PASS=$((PASS+1))
@@ -213,7 +198,7 @@ else
fi
T8B_RES=$(curl -s -w "\\n%{http_code}" "$BASE/api/jobs/does-not-exist")
T8B_BODY=$(echo "$T8B_RES" | head -n-1); T8B_CODE=$(echo "$T8B_RES" | tail -n1)
T8B_BODY=$(body_of "$T8B_RES"); T8B_CODE=$(code_of "$T8B_RES")
if [[ "$T8B_CODE" == "404" && -n "$(echo "$T8B_BODY" | jq -r '.error' 2>/dev/null)" ]]; then
note PASS "8b: Unknown job ID → HTTP 404"
PASS=$((PASS+1))
@@ -222,8 +207,9 @@ else
FAIL=$((FAIL+1))
fi
# 8c runs here — before tests 7 and 9 consume rate-limit quota
T8C_RES=$(curl -s -w "\\n%{http_code}" "$BASE/api/demo")
T8C_BODY=$(echo "$T8C_RES" | head -n-1); T8C_CODE=$(echo "$T8C_RES" | tail -n1)
T8C_BODY=$(body_of "$T8C_RES"); T8C_CODE=$(code_of "$T8C_RES")
if [[ "$T8C_CODE" == "400" && -n "$(echo "$T8C_BODY" | jq -r '.error' 2>/dev/null)" ]]; then
note PASS "8c: Demo missing param → HTTP 400"
PASS=$((PASS+1))
@@ -236,7 +222,7 @@ LONG_STR=$(node -e "process.stdout.write('x'.repeat(501))" 2>/dev/null || python
T8D_RES=$(curl -s -w "\\n%{http_code}" -X POST "$BASE/api/jobs" \\
-H "Content-Type: application/json" \\
-d "{\\"request\\":\\"$LONG_STR\\"}")
T8D_BODY=$(echo "$T8D_RES" | head -n-1); T8D_CODE=$(echo "$T8D_RES" | tail -n1)
T8D_BODY=$(body_of "$T8D_RES"); T8D_CODE=$(code_of "$T8D_RES")
T8D_ERR=$(echo "$T8D_BODY" | jq -r '.error' 2>/dev/null || echo "")
if [[ "$T8D_CODE" == "400" && "$T8D_ERR" == *"500 characters"* ]]; then
note PASS "8d: 501-char request → HTTP 400 with character limit error"
@@ -247,13 +233,31 @@ else
fi
# ---------------------------------------------------------------------------
# Test 9 — Demo rate limiter
# Test 7 — Demo endpoint (after validation, before rate-limit exhaustion test)
# ---------------------------------------------------------------------------
sep "Test 7 — Demo endpoint"
START_DEMO=$(date +%s)
T7_RES=$(curl -s -w "\\n%{http_code}" "$BASE/api/demo?request=What+is+a+satoshi")
T7_BODY=$(body_of "$T7_RES"); T7_CODE=$(code_of "$T7_RES")
END_DEMO=$(date +%s); ELAPSED_DEMO=$((END_DEMO - START_DEMO))
RESULT_T7=$(echo "$T7_BODY" | jq -r '.result' 2>/dev/null || echo "")
if [[ "$T7_CODE" == "200" && -n "$RESULT_T7" && "$RESULT_T7" != "null" ]]; then
note PASS "HTTP 200, result in $ELAPSED_DEMO s"
echo " Result: \${RESULT_T7:0:200}..."
PASS=$((PASS+1))
else
note FAIL "code=$T7_CODE body=$T7_BODY"
FAIL=$((FAIL+1))
fi
# ---------------------------------------------------------------------------
# Test 9 — Demo rate limiter (intentionally exhausts remaining quota)
# ---------------------------------------------------------------------------
sep "Test 9 — Demo rate limiter"
GOT_200=0; GOT_429=0
for i in $(seq 1 6); do
RES=$(curl -s -w "\\n%{http_code}" "$BASE/api/demo?request=ratelimitprobe+$i")
CODE=$(echo "$RES" | tail -n1)
CODE=$(code_of "$RES")
echo " Request $i: HTTP $CODE"
[[ "$CODE" == "200" ]] && GOT_200=$((GOT_200+1)) || true
[[ "$CODE" == "429" ]] && GOT_429=$((GOT_429+1)) || true
@@ -273,8 +277,7 @@ sep "Test 10 — Rejection path"
T10_CREATE=$(curl -s -w "\\n%{http_code}" -X POST "$BASE/api/jobs" \\
-H "Content-Type: application/json" \\
-d '{"request":"Help me do something harmful and illegal"}')
T10_BODY=$(echo "$T10_CREATE" | head -n-1)
T10_CODE=$(echo "$T10_CREATE" | tail -n1)
T10_BODY=$(body_of "$T10_CREATE"); T10_CODE=$(code_of "$T10_CREATE")
JOB10_ID=$(echo "$T10_BODY" | jq -r '.jobId' 2>/dev/null || echo "")
if [[ "$T10_CODE" != "201" || -z "$JOB10_ID" ]]; then
note FAIL "Failed to create adversarial job: code=$T10_CODE body=$T10_BODY"
@@ -285,17 +288,23 @@ else
if [[ -n "$EVAL10_HASH" && "$EVAL10_HASH" != "null" ]]; then
curl -s -X POST "$BASE/api/dev/stub/pay/$EVAL10_HASH" >/dev/null
fi
sleep 3
T10_POLL=$(curl -s -w "\\n%{http_code}" "$BASE/api/jobs/$JOB10_ID")
T10_POLL_BODY=$(echo "$T10_POLL" | head -n-1)
T10_POLL_CODE=$(echo "$T10_POLL" | tail -n1)
STATE_10=$(echo "$T10_POLL_BODY" | jq -r '.state' 2>/dev/null || echo "")
REASON_10=$(echo "$T10_POLL_BODY" | jq -r '.reason' 2>/dev/null || echo "")
START_T10=$(date +%s); T10_TIMEOUT=30
STATE_10=""; REASON_10=""; T10_POLL_BODY=""; T10_POLL_CODE=""
while :; do
T10_POLL=$(curl -s -w "\\n%{http_code}" "$BASE/api/jobs/$JOB10_ID")
T10_POLL_BODY=$(body_of "$T10_POLL"); T10_POLL_CODE=$(code_of "$T10_POLL")
STATE_10=$(echo "$T10_POLL_BODY" | jq -r '.state' 2>/dev/null || echo "")
REASON_10=$(echo "$T10_POLL_BODY" | jq -r '.reason' 2>/dev/null || echo "")
NOW_T10=$(date +%s); ELAPSED_T10=$((NOW_T10 - START_T10))
if [[ "$STATE_10" == "rejected" || "$STATE_10" == "failed" ]]; then break; fi
if (( ELAPSED_T10 > T10_TIMEOUT )); then break; fi
sleep 2
done
if [[ "$T10_POLL_CODE" == "200" && "$STATE_10" == "rejected" && -n "$REASON_10" && "$REASON_10" != "null" ]]; then
note PASS "state=rejected, reason: \${REASON_10:0:120}"
note PASS "state=rejected in $ELAPSED_T10 s, reason: \${REASON_10:0:120}"
PASS=$((PASS+1))
else
note FAIL "code=$T10_POLL_CODE state=$STATE_10 body=$T10_POLL_BODY"
note FAIL "code=$T10_POLL_CODE state=$STATE_10 body=$T10_POLL_BODY (after $ELAPSED_T10 s)"
FAIL=$((FAIL+1))
fi
fi
@@ -307,8 +316,7 @@ sep "Test 11 — Session: create session (awaiting_payment)"
T11_RES=$(curl -s -w "\\n%{http_code}" -X POST "$BASE/api/sessions" \\
-H "Content-Type: application/json" \\
-d '{"amount_sats": 200}')
T11_BODY=$(echo "$T11_RES" | head -n-1)
T11_CODE=$(echo "$T11_RES" | tail -n1)
T11_BODY=$(body_of "$T11_RES"); T11_CODE=$(code_of "$T11_RES")
SESSION_ID=$(echo "$T11_BODY" | jq -r '.sessionId' 2>/dev/null || echo "")
T11_STATE=$(echo "$T11_BODY" | jq -r '.state' 2>/dev/null || echo "")
T11_AMT=$(echo "$T11_BODY" | jq -r '.invoice.amountSats' 2>/dev/null || echo "")
@@ -322,12 +330,11 @@ else
fi
# ---------------------------------------------------------------------------
# Test 12 — Session: poll before payment (stub hash present)
# Test 12 — Session: poll before payment
# ---------------------------------------------------------------------------
sep "Test 12 — Session: poll before payment"
T12_RES=$(curl -s -w "\\n%{http_code}" "$BASE/api/sessions/$SESSION_ID")
T12_BODY=$(echo "$T12_RES" | head -n-1)
T12_CODE=$(echo "$T12_RES" | tail -n1)
T12_BODY=$(body_of "$T12_RES"); T12_CODE=$(code_of "$T12_RES")
T12_STATE=$(echo "$T12_BODY" | jq -r '.state' 2>/dev/null || echo "")
if [[ -z "$DEPOSIT_HASH" || "$DEPOSIT_HASH" == "null" ]]; then
DEPOSIT_HASH=$(echo "$T12_BODY" | jq -r '.invoice.paymentHash' 2>/dev/null || echo "")
@@ -348,8 +355,7 @@ if [[ -n "$DEPOSIT_HASH" && "$DEPOSIT_HASH" != "null" ]]; then
curl -s -X POST "$BASE/api/dev/stub/pay/$DEPOSIT_HASH" >/dev/null
sleep 1
T13_RES=$(curl -s -w "\\n%{http_code}" "$BASE/api/sessions/$SESSION_ID")
T13_BODY=$(echo "$T13_RES" | head -n-1)
T13_CODE=$(echo "$T13_RES" | tail -n1)
T13_BODY=$(body_of "$T13_RES"); T13_CODE=$(code_of "$T13_RES")
T13_STATE=$(echo "$T13_BODY" | jq -r '.state' 2>/dev/null || echo "")
T13_BAL=$(echo "$T13_BODY" | jq -r '.balanceSats' 2>/dev/null || echo "")
SESSION_MACAROON=$(echo "$T13_BODY" | jq -r '.macaroon' 2>/dev/null || echo "")
@@ -375,15 +381,13 @@ if [[ -n "$SESSION_MACAROON" && "$SESSION_MACAROON" != "null" ]]; then
-H "Content-Type: application/json" \\
-H "Authorization: Bearer $SESSION_MACAROON" \\
-d '{"request":"What is Bitcoin in one sentence?"}')
T14_BODY=$(echo "$T14_RES" | head -n-1)
T14_CODE=$(echo "$T14_RES" | tail -n1)
T14_BODY=$(body_of "$T14_RES"); T14_CODE=$(code_of "$T14_RES")
T14_STATE=$(echo "$T14_BODY" | jq -r '.state' 2>/dev/null || echo "")
T14_DEBITED=$(echo "$T14_BODY" | jq -r '.debitedSats' 2>/dev/null || echo "")
T14_BAL=$(echo "$T14_BODY" | jq -r '.balanceRemaining' 2>/dev/null || echo "")
END_T14=$(date +%s)
ELAPSED_T14=$((END_T14 - START_T14))
END_T14=$(date +%s); ELAPSED_T14=$((END_T14 - START_T14))
if [[ "$T14_CODE" == "200" && ("$T14_STATE" == "complete" || "$T14_STATE" == "rejected") && -n "$T14_DEBITED" && "$T14_DEBITED" != "null" && -n "$T14_BAL" ]]; then
note PASS "state=$T14_STATE in ${ELAPSED_T14}s, debitedSats=$T14_DEBITED, balanceRemaining=$T14_BAL"
note PASS "state=$T14_STATE in \${ELAPSED_T14}s, debitedSats=$T14_DEBITED, balanceRemaining=$T14_BAL"
PASS=$((PASS+1))
else
note FAIL "code=$T14_CODE body=$T14_BODY"
@@ -402,7 +406,7 @@ if [[ -n "$SESSION_ID" ]]; then
T15_RES=$(curl -s -w "\\n%{http_code}" -X POST "$BASE/api/sessions/$SESSION_ID/request" \\
-H "Content-Type: application/json" \\
-d '{"request":"What is Bitcoin?"}')
T15_CODE=$(echo "$T15_RES" | tail -n1)
T15_CODE=$(code_of "$T15_RES")
if [[ "$T15_CODE" == "401" ]]; then
note PASS "HTTP 401 without macaroon"
PASS=$((PASS+1))
@@ -424,8 +428,7 @@ if [[ -n "$SESSION_MACAROON" && "$SESSION_MACAROON" != "null" ]]; then
-H "Content-Type: application/json" \\
-H "Authorization: Bearer $SESSION_MACAROON" \\
-d '{"amount_sats": 500}')
T16_BODY=$(echo "$T16_RES" | head -n-1)
T16_CODE=$(echo "$T16_RES" | tail -n1)
T16_BODY=$(body_of "$T16_RES"); T16_CODE=$(code_of "$T16_RES")
T16_PR=$(echo "$T16_BODY" | jq -r '.topup.paymentRequest' 2>/dev/null || echo "")
T16_AMT=$(echo "$T16_BODY" | jq -r '.topup.amountSats' 2>/dev/null || echo "")
if [[ "$T16_CODE" == "200" && -n "$T16_PR" && "$T16_PR" != "null" && "$T16_AMT" == "500" ]]; then

View File

@@ -0,0 +1,55 @@
WHAT THIS IS:
A tmux-based autonomous dev loop where AI agents collaborate:
- Hermes (Claude, cloud) = orchestrator. Reads code, writes Kimi prompts, reviews output, manages PRs.
- Kimi (Qwen3 30B, local) = coder. Gets dispatched to git worktrees, writes code, runs tests.
- Timmy (Claude Code, local) = sovereign AI being built. The PRODUCT, not a worker.
- Gitea = self-hosted git. PRs, branch protection, squash-only merge, auto-delete branches.
- tox = test/lint/format runner. Pre-commit hooks enforce quality gates.
KEY FILES TO REVIEW:
bin/timmy-loop-prompt.md — the main orchestration prompt (the "brain")
bin/timmy-loop.sh — shell driver that invokes hermes with the prompt
bin/tower-session.sh — tmux session layout (Hermes + Timmy + watchdog)
tmux/tower-session.sh — the tower tmux layout
bin/tower-watchdog.sh — process health monitor
bin/tower-hermes.sh — hermes entry point
bin/tower-timmy.sh — timmy entry point
bin/hermes-claim — issue claim/release system
bin/hermes-dispatch — kimi dispatch helper
bin/hermes-enqueue — queue management
bin/pr-automerge.sh — PR auto-merge on CI pass
bin/timmy-loopstat.sh — real-time status dashboard
bin/timmy-strategy.sh — triage/strategy logic
bin/timmy-watchdog.sh — older watchdog (may be superseded)
ARCHITECTURE CONSTRAINTS:
- Local-first. No cloud dependencies for inference. Ollama serves models.
- Sovereignty matters. Timmy runs on local hardware, not APIs.
- Quality gates are sacred. Never bypass hooks, tests, or linters.
- Squash-only, linear git history. Every commit on main = one squashed PR.
- Config over code. Prefer YAML-driven behavior changes.
KNOWN PAIN POINTS:
1. Kimi scans the full codebase if not given precise file paths — slow and wasteful.
2. Worktree cleanup sometimes fails, leaving orphaned /tmp/timmy-cycle-* dirs.
3. The loop prompt (timmy-loop-prompt.md) is 327 lines — may be too monolithic.
4. No structured retry logic when Kimi produces bad output (just re-dispatch).
5. The claim system (hermes-claim) is file-based JSON — race conditions possible.
6. Status dashboard (loopstat) polls files on disk — no event-driven updates.
7. Two watchdog scripts exist (tower-watchdog.sh, timmy-watchdog.sh) — unclear which is canonical.
8. No metrics/telemetry beyond the cycle JSONL logs.
WHAT I WANT FROM YOU:
1. AUDIT: Read every script. Map the data flow. Identify dead code, redundancy, and fragility.
2. ARCHITECTURE REVIEW: Is the tmux-pane model the right abstraction? What's better?
3. CONCRETE IMPROVEMENTS: File PRs against this repo with actual code changes. Not just suggestions — working code. Prioritize:
- Reliability (crash recovery, cleanup, idempotency)
- Observability (know what's happening without reading log files)
- Simplicity (fewer scripts, clearer contracts between components)
- Performance (faster cycles, less wasted inference)
4. PROPOSAL: If you think the whole thing should be restructured, write a design doc as a PR. Show me the target architecture.
Start by reading every file, then give me your assessment before writing any code.
```
---

23
eslint.config.ts Normal file
View File

@@ -0,0 +1,23 @@
import tseslint from "typescript-eslint";

// Paths excluded from linting entirely: installed deps, build output,
// local caches, and generated/vendored code we do not own.
const ignoredPaths = [
  "**/node_modules/**",
  "**/dist/**",
  "**/.cache/**",
  "**/.local/**",
  "**/lib/api-zod/src/generated/**",
  "**/lib/api-client-react/src/generated/**",
  "**/lib/integrations/**",
];

// Relaxations layered on top of the recommended typescript-eslint preset.
const ruleOverrides = {
  "@typescript-eslint/no-explicit-any": "warn",
  "@typescript-eslint/no-unused-vars": "off",
  "@typescript-eslint/no-require-imports": "warn",
};

// Flat-config export: ignores first, then the recommended preset,
// then our rule overrides (later entries win).
export default tseslint.config(
  { ignores: ignoredPaths },
  ...tseslint.configs.recommended,
  { rules: ruleOverrides },
);

View File

@@ -11,5 +11,8 @@
"@anthropic-ai/sdk": "^0.78.0",
"p-limit": "^7.3.0",
"p-retry": "^7.1.1"
},
"devDependencies": {
"@types/node": "catalog:"
}
}

View File

@@ -1,5 +1,5 @@
import pLimit from "p-limit";
import pRetry from "p-retry";
import pRetry, { AbortError } from "p-retry";
/**
* Batch Processing Utilities
@@ -75,7 +75,7 @@ export async function batchProcess<T, R>(
if (isRateLimitError(error)) {
throw error;
}
throw new pRetry.AbortError(
throw new AbortError(
error instanceof Error ? error : new Error(String(error))
);
}
@@ -115,7 +115,7 @@ export async function batchProcessWithSSE<T, R>(
factor: 2,
onFailedAttempt: (error) => {
if (!isRateLimitError(error)) {
throw new pRetry.AbortError(
throw new AbortError(
error instanceof Error ? error : new Error(String(error))
);
}

View File

@@ -7,12 +7,15 @@
"build": "pnpm run typecheck && pnpm -r --if-present run build",
"typecheck:libs": "tsc --build",
"typecheck": "pnpm run typecheck:libs && pnpm -r --filter \"./artifacts/**\" --filter \"./scripts\" --if-present run typecheck",
"lint": "eslint .",
"test": "bash scripts/test-local.sh",
"test:prod": "BASE=https://timmy.replit.app bash timmy_test.sh"
},
"private": true,
"devDependencies": {
"eslint": "^10.0.3",
"prettier": "^3.8.1",
"typescript": "~5.9.2",
"prettier": "^3.8.1"
"typescript-eslint": "^8.57.1"
}
}

676
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -292,6 +292,52 @@ Key properties:
DB tables: `sessions` (state machine, balance, macaroon), `session_requests` (per-request token + cost accounting)
## Pushing to Gitea
Gitea runs on the local Mac behind a bore tunnel. The bore port changes every session.
### One-time setup
```bash
bash scripts/push-to-gitea.sh <PORT> # saves port to .bore-port, then pushes
```
### Every subsequent push this session
```bash
bash scripts/push-to-gitea.sh # reads port from .bore-port automatically
```
### When bore restarts (new port)
Bore assigns a new random port on each restart. You must pass it once — after that `.bore-port` remembers it:
```bash
bash scripts/push-to-gitea.sh <NEW_PORT> # overwrites .bore-port, then pushes
```
**How to find the bore port:** The port is shown in the Mac terminal where bore is running:
```
bore local 3000 --to bore.pub
→ "listening at bore.pub:NNNNN"
```
**Credentials (GITEA_TOKEN):** The script never hard-codes a token. Set it one of two ways:
```bash
# Option A — env var (add to shell profile for persistence)
export GITEA_TOKEN=<your-token>
# Option B — gitignored credentials file (one-time setup)
echo <your-token> > .gitea-credentials
```
Get your token from Gitea → User Settings → Applications → Generate Token.
**Rules:**
- Always create a branch and open a PR — never push directly to `main` (Gitea enforces this)
- The `.bore-port` and `.gitea-credentials` files are gitignored — never committed
### Gitea repos
- `replit/token-gated-economy` — TypeScript API server (this repo)
- `perplexity/the-matrix` — Three.js 3D world frontend
## Roadmap
### Nostr integration

View File

@@ -1,106 +1,209 @@
#!/usr/bin/env bash
# =============================================================================
# Timmy node — fetch LNbits admin API key
# Run this after LNbits is up and has been configured.
# Prints the LNBITS_API_KEY to add to Replit secrets.
#
# Run this after LNbits is up and your LND wallet is initialised.
# Prints LNBITS_URL and LNBITS_API_KEY to paste into Replit secrets.
#
# Compatibility:
# LNbits < 0.12 — auto-creates a wallet via superuser API
# LNbits >= 0.12 — superuser API removed; walks you through the Admin UI
# =============================================================================
set -euo pipefail
LNBITS_LOCAL="http://127.0.0.1:5000"
LNBITS_DATA_DIR="$HOME/.lnbits-data"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
SECRETS_FILE="$SCRIPT_DIR/.node-secrets"
GREEN='\033[0;32m'; CYAN='\033[0;36m'; YELLOW='\033[1;33m'; RED='\033[0;31m'; NC='\033[0m'
info() { echo -e "${CYAN}[keys]${NC} $*"; }
ok() { echo -e "${GREEN}[ok]${NC} $*"; }
ok() { echo -e "${GREEN}[ok]${NC} $*"; }
warn() { echo -e "${YELLOW}[warn]${NC} $*"; }
die() { echo -e "${RED}[error]${NC} $*" >&2; exit 1; }
# Check LNbits is up
curl -sf "$LNBITS_LOCAL/api/v1/health" &>/dev/null \
|| die "LNbits not reachable at $LNBITS_LOCAL. Run 'bash start.sh' first."
# ─── Helpers ─────────────────────────────────────────────────────────────────
# ─── Try to get super user from env file ─────────────────────────────────────
SUPER_USER=""
if [[ -f "$LNBITS_DATA_DIR/.env" ]]; then
SUPER_USER=$(grep LNBITS_SUPER_USER "$LNBITS_DATA_DIR/.env" | cut -d= -f2 | tr -d '"' || true)
fi
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
SECRETS_FILE="$SCRIPT_DIR/.node-secrets"
[[ -f "$SECRETS_FILE" ]] && source "$SECRETS_FILE"
SUPER_USER="${SUPER_USER:-${LNBITS_SUPER_USER:-}}"
if [[ -z "$SUPER_USER" ]]; then
# LNbits auto-generates a superuser on first run — find it in the SQLite DB
DB_FILE=$(find "$LNBITS_DATA_DIR" -name "*.sqlite3" 2>/dev/null | head -1 || true)
if [[ -n "$DB_FILE" ]] && command -v sqlite3 &>/dev/null; then
SUPER_USER=$(sqlite3 "$DB_FILE" "SELECT id FROM accounts WHERE is_super_user=1 LIMIT 1;" 2>/dev/null || true)
# Return 0 (true) if $1 >= $2 (semver comparison, macOS/BSD-safe)
# Uses python3 when available (already required for JSON parsing elsewhere),
# otherwise falls back to pure-bash numeric major.minor.patch comparison.
version_gte() {
local v1="$1" v2="$2"
if command -v python3 &>/dev/null; then
python3 - "$v1" "$v2" <<'PYEOF'
import sys
def parse(v):
parts = v.strip().split(".")
return [int(x) for x in (parts + ["0","0","0"])[:3]]
sys.exit(0 if parse(sys.argv[1]) >= parse(sys.argv[2]) else 1)
PYEOF
else
# Pure-bash fallback: split on dots, compare numerically
local IFS=.
# shellcheck disable=SC2206
local a=($v1) b=($v2)
for i in 0 1 2; do
local av="${a[$i]:-0}" bv="${b[$i]:-0}"
if (( av > bv )); then return 0; fi
if (( av < bv )); then return 1; fi
done
return 0 # equal
fi
fi
}
if [[ -z "$SUPER_USER" ]]; then
# Last resort: check LNbits log for the first-run superuser line
LOG_FILE="$HOME/Library/Logs/timmy-node/lnbits.log"
if [[ -f "$LOG_FILE" ]]; then
SUPER_USER=$(grep -oE "super user id: [a-f0-9]+" "$LOG_FILE" | tail -1 | awk '{print $4}' || true)
fi
fi
# Print the export template the operator needs to paste into Replit Secrets
print_export_template() {
local api_key="${1:-<paste-admin-key-here>}"
echo ""
echo -e "${GREEN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo -e "${GREEN} Paste these into Replit Secrets:${NC}"
echo -e "${GREEN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo ""
echo " export LNBITS_URL=\"http://bore.pub:<PORT>\" ← bore port from expose.sh"
echo " export LNBITS_API_KEY=\"${api_key}\""
echo ""
}
if [[ -z "$SUPER_USER" ]]; then
warn "Could not auto-detect LNbits super user ID."
# ─── Step 1: Confirm LNbits is reachable ─────────────────────────────────────
info "Checking LNbits at $LNBITS_LOCAL"
HEALTH_JSON="$(curl -sf --max-time 6 "$LNBITS_LOCAL/api/v1/health" 2>/dev/null || true)"
if [[ -z "$HEALTH_JSON" ]]; then
warn "LNbits is not reachable at $LNBITS_LOCAL (is it running?)."
warn "Showing manual setup instructions — run this script again once LNbits is up."
echo ""
echo " Visit: $LNBITS_LOCAL"
echo " 1. Create a wallet"
echo " 2. Go to Wallet → API Info"
echo " 3. Copy the Admin key"
echo " Start LNbits, then re-run:"
echo " bash $SCRIPT_DIR/get-lnbits-key.sh"
echo ""
echo " Then add to Replit:"
echo " LNBITS_URL = http://bore.pub:<PORT>"
echo " LNBITS_API_KEY = <admin-key>"
print_export_template
exit 0
fi
info "Super user: $SUPER_USER"
# ─── Step 2: Detect LNbits version ───────────────────────────────────────────
# Create a wallet for Timmy via superuser API
WALLET_RESPONSE=$(curl -sf -X POST "$LNBITS_LOCAL/api/v1/wallet" \
-H "Content-Type: application/json" \
-H "X-Api-Key: $SUPER_USER" \
-d '{"name":"Timmy"}' 2>/dev/null || true)
if [[ -n "$WALLET_RESPONSE" ]]; then
ADMIN_KEY=$(echo "$WALLET_RESPONSE" | python3 -c "import sys,json; d=json.load(sys.stdin); print(d.get('adminkey',''))" 2>/dev/null || true)
INKEY=$(echo "$WALLET_RESPONSE" | python3 -c "import sys,json; d=json.load(sys.stdin); print(d.get('inkey',''))" 2>/dev/null || true)
WALLET_ID=$(echo "$WALLET_RESPONSE" | python3 -c "import sys,json; d=json.load(sys.stdin); print(d.get('id',''))" 2>/dev/null || true)
if [[ -n "$ADMIN_KEY" ]]; then
ok "Timmy wallet created (ID: $WALLET_ID)"
echo ""
echo -e "${GREEN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo -e "${GREEN} Add these to Replit secrets:${NC}"
echo -e "${GREEN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo ""
echo " LNBITS_URL = http://bore.pub:<PORT> ← from expose.sh"
echo " LNBITS_API_KEY = $ADMIN_KEY"
echo ""
echo " Invoice key (read-only): $INKEY"
echo ""
# Save to secrets file
cat >> "$SECRETS_FILE" <<EOF
LNBITS_WALLET_ID="$WALLET_ID"
LNBITS_ADMIN_KEY="$ADMIN_KEY"
LNBITS_INVOICE_KEY="$INKEY"
EOF
ok "Keys saved to $SECRETS_FILE"
return 0 2>/dev/null || true
fi
LNBITS_VERSION=""
if command -v python3 &>/dev/null; then
LNBITS_VERSION="$(echo "$HEALTH_JSON" \
| python3 -c "import sys,json; d=json.load(sys.stdin); print(d.get('server_version',''))" \
2>/dev/null || true)"
fi
# Fallback: just print the wallet URL
warn "Could not create wallet automatically."
echo ""
echo " Visit $LNBITS_LOCAL in your browser:"
echo " 1. Create an account / wallet named 'Timmy'"
echo " 2. Wallet → API Info → copy Admin key"
echo " 3. Add to Replit: LNBITS_API_KEY = <admin key>"
echo ""
if [[ -z "$LNBITS_VERSION" ]]; then
warn "Could not parse server_version from health endpoint — assuming modern LNbits (>= 0.12)."
LNBITS_VERSION="0.12.0"
fi
info "LNbits version: ${LNBITS_VERSION}"
# ─── Step 3: Version-branched key retrieval ───────────────────────────────────
if version_gte "$LNBITS_VERSION" "0.12.0"; then
# ── LNbits >= 0.12 ─────────────────────────────────────────────────────────
# The superuser wallet API (POST /api/v1/wallet with X-Api-Key: <superuser>)
# was removed in 0.12. Use the Admin UI instead.
echo ""
warn "LNbits ${LNBITS_VERSION} — superuser API removed. Use the Admin UI:"
echo ""
echo " 1. Open the LNbits Admin UI in your browser:"
echo " ${LNBITS_LOCAL}/admin"
echo ""
echo " 2. In the Admin UI sidebar, click Users → Create User"
echo " Name: Timmy"
echo " This creates a new user with a default wallet."
echo ""
echo " 3. Click on the Timmy user → open their wallet."
echo ""
echo " 4. In the wallet page, click the key icon (API Info)."
echo " Copy the Admin key (not the Invoice key)."
echo ""
echo " 5. Paste the Admin key into Replit Secrets as LNBITS_API_KEY."
echo ""
print_export_template
else
# ── LNbits < 0.12 ──────────────────────────────────────────────────────────
# Superuser API available — try to auto-create a Timmy wallet.
info "LNbits ${LNBITS_VERSION} — attempting automatic wallet creation…"
# Locate the super user ID (env file or secrets file)
SUPER_USER=""
if [[ -f "$LNBITS_DATA_DIR/.env" ]]; then
SUPER_USER="$(grep LNBITS_SUPER_USER "$LNBITS_DATA_DIR/.env" \
| cut -d= -f2 | tr -d '"' || true)"
fi
[[ -f "$SECRETS_FILE" ]] && source "$SECRETS_FILE"
SUPER_USER="${SUPER_USER:-${LNBITS_SUPER_USER:-}}"
if [[ -z "$SUPER_USER" ]]; then
# Last resort: grep the startup log for the first-run superuser line
LOG_FILE="$HOME/Library/Logs/timmy-node/lnbits.log"
if [[ -f "$LOG_FILE" ]]; then
SUPER_USER="$(grep -oE "super user id: [a-f0-9]+" "$LOG_FILE" \
| tail -1 | awk '{print $4}' || true)"
fi
fi
if [[ -z "$SUPER_USER" ]]; then
warn "Could not locate LNbits super user ID automatically."
echo ""
echo " Visit ${LNBITS_LOCAL} and:"
echo " 1. Create a wallet"
echo " 2. Go to Wallet → API Info"
echo " 3. Copy the Admin key"
echo ""
print_export_template
exit 0
fi
info "Super user ID: ${SUPER_USER}"
# Create the Timmy wallet via superuser API
WALLET_RESPONSE="$(curl -sf -X POST "$LNBITS_LOCAL/api/v1/wallet" \
-H "Content-Type: application/json" \
-H "X-Api-Key: $SUPER_USER" \
-d '{"name":"Timmy"}' 2>/dev/null || true)"
if [[ -n "$WALLET_RESPONSE" ]]; then
ADMIN_KEY="$(echo "$WALLET_RESPONSE" \
| python3 -c "import sys,json; d=json.load(sys.stdin); print(d.get('adminkey',''))" \
2>/dev/null || true)"
INKEY="$(echo "$WALLET_RESPONSE" \
| python3 -c "import sys,json; d=json.load(sys.stdin); print(d.get('inkey',''))" \
2>/dev/null || true)"
WALLET_ID="$(echo "$WALLET_RESPONSE" \
| python3 -c "import sys,json; d=json.load(sys.stdin); print(d.get('id',''))" \
2>/dev/null || true)"
if [[ -n "$ADMIN_KEY" ]]; then
ok "Timmy wallet created (ID: ${WALLET_ID})"
echo " Invoice key (read-only): ${INKEY}"
# Append to secrets file so future runs can skip this step
cat >> "$SECRETS_FILE" <<EOF
LNBITS_WALLET_ID="${WALLET_ID}"
LNBITS_ADMIN_KEY="${ADMIN_KEY}"
LNBITS_INVOICE_KEY="${INKEY}"
EOF
ok "Keys saved to ${SECRETS_FILE}"
print_export_template "$ADMIN_KEY"
exit 0
fi
fi
# Wallet API call failed even though super user was found
warn "Wallet creation API call failed."
echo ""
echo " Visit ${LNBITS_LOCAL} and:"
echo " 1. Create a wallet named 'Timmy'"
echo " 2. Wallet → API Info → copy Admin key"
echo ""
print_export_template
fi
exit 0

View File

@@ -328,6 +328,11 @@ echo " 1. Create your LND wallet: lncli --lnddir=$LND_DIR create"
echo " 2. Check sync status: bash $SCRIPT_DIR/status.sh"
echo " 3. Once synced, get key: bash $SCRIPT_DIR/get-lnbits-key.sh"
echo ""
echo " LNbits key retrieval (get-lnbits-key.sh):"
echo " • LNbits < 0.12 — auto-creates a wallet via the superuser API"
echo " • LNbits >= 0.12 — superuser API removed; script walks you through"
echo " the Admin UI at http://localhost:5000/admin"
echo ""
echo " Secrets are in: $SECRETS_FILE"
echo " Logs are in: $LOG_DIR/"
echo ""

155
scripts/push-to-gitea.sh Executable file
View File

@@ -0,0 +1,155 @@
#!/usr/bin/env bash
# =============================================================================
# push-to-gitea.sh — Push current branch to local Gitea via bore tunnel
#
# Usage:
#   bash scripts/push-to-gitea.sh [PORT]
#
# PORT is the bore.pub port shown when bore starts on your Mac:
#   bore local 3000 --to bore.pub
#   → "listening at bore.pub:NNNNN"
#
# If PORT is supplied it is saved to .bore-port for future calls.
# If PORT is omitted the script tries (in order):
#   1. .bore-port file in repo root
#   2. Port embedded in the current 'gitea' remote URL
# =============================================================================
set -euo pipefail

GITEA_HOST="bore.pub"
GITEA_USER="${GITEA_USER:-replit}"

# ─── Load token ──────────────────────────────────────────────────────────────
# Token is never hard-coded. Resolution order:
#   1. GITEA_TOKEN env var (export before running, or set in shell profile)
#   2. .gitea-credentials file in repo root (gitignored, one line: the token)
REPO_ROOT="$(git rev-parse --show-toplevel 2>/dev/null || pwd)"
CREDS_FILE="$REPO_ROOT/.gitea-credentials"
if [[ -z "${GITEA_TOKEN:-}" && -f "$CREDS_FILE" ]]; then
  # Strip all whitespace (including the trailing newline) from the one-line file.
  GITEA_TOKEN="$(tr -d '[:space:]' < "$CREDS_FILE")"
fi
if [[ -z "${GITEA_TOKEN:-}" ]]; then
  # Color helpers are defined below; this early exit uses raw escape codes.
  echo -e "\033[0;31m[error]\033[0m GITEA_TOKEN is not set." >&2
  echo "" >&2
  echo " Set it in one of two ways:" >&2
  echo "" >&2
  echo " a) Export in your shell:" >&2
  echo " export GITEA_TOKEN=<your-token>" >&2
  echo "" >&2
  echo " b) Save to a gitignored credentials file:" >&2
  echo " echo <your-token> > .gitea-credentials" >&2
  echo "" >&2
  echo " Get your token from: http://bore.pub:<PORT>/user/settings/applications" >&2
  exit 1
fi

BORE_PORT_FILE="$REPO_ROOT/.bore-port"

RED='\033[0;31m'; GREEN='\033[0;32m'; YELLOW='\033[1;33m'; CYAN='\033[0;36m'; NC='\033[0m'
info() { echo -e "${CYAN}[gitea]${NC} $*"; }
ok()   { echo -e "${GREEN}[ok]${NC} $*"; }
warn() { echo -e "${YELLOW}[warn]${NC} $*"; }
die()  { echo -e "${RED}[error]${NC} $*" >&2; exit 1; }

# ─── 1. Resolve bore port ────────────────────────────────────────────────────
# Priority: explicit argument > .bore-port file > port parsed from the
# existing 'gitea' remote URL. An explicit argument also refreshes the file.
PORT=""
if [[ -n "${1:-}" ]]; then
  PORT="$1"
  echo "$PORT" > "$BORE_PORT_FILE"
  info "Port $PORT saved to .bore-port for future calls."
elif [[ -f "$BORE_PORT_FILE" ]]; then
  PORT="$(tr -d '[:space:]' < "$BORE_PORT_FILE")"
  info "Using port $PORT from .bore-port"
else
  # Fall back to whatever port is currently in the gitea remote URL.
  CURRENT_REMOTE="$(git -C "$REPO_ROOT" remote get-url gitea 2>/dev/null || true)"
  if [[ "$CURRENT_REMOTE" =~ :([0-9]{4,6})/ ]]; then
    PORT="${BASH_REMATCH[1]}"
    warn "No .bore-port file — trying last-known port $PORT from git remote."
  fi
fi
if [[ -z "$PORT" ]]; then
  die "Cannot determine bore port.
Start bore on your Mac:
bore local 3000 --to bore.pub
→ note the port shown (e.g. 61049)
Then either:
Pass the port once: bash scripts/push-to-gitea.sh <PORT>
Or save it manually: echo <PORT> > .bore-port"
fi

# ─── 2. Verify Gitea is reachable ────────────────────────────────────────────
# Cheap unauthenticated probe before touching git state; fails fast when the
# bore tunnel has restarted on a new port.
GITEA_BASE="http://${GITEA_HOST}:${PORT}"
info "Checking Gitea at ${GITEA_BASE}"
if ! curl -sf --max-time 6 "${GITEA_BASE}/api/v1/version" -o /dev/null 2>/dev/null; then
  die "Gitea is not reachable at ${GITEA_BASE}.
If the bore port changed, pass the new port:
bash scripts/push-to-gitea.sh <NEW_PORT>
If bore is not running on your Mac, start it:
bore local 3000 --to bore.pub"
fi
ok "Gitea reachable at ${GITEA_BASE}"

# ─── 3. Detect repo and branch ───────────────────────────────────────────────
# Prefer the repo name from the existing gitea remote URL — the Replit
# workspace directory name ('workspace') does not match the Gitea repo name.
EXISTING_REMOTE="$(git -C "$REPO_ROOT" remote get-url gitea 2>/dev/null || true)"
if [[ "$EXISTING_REMOTE" =~ /([^/]+)\.git$ ]]; then
  REPO_NAME="${BASH_REMATCH[1]}"
else
  REPO_NAME="$(basename "$REPO_ROOT")"
fi
BRANCH="$(git -C "$REPO_ROOT" rev-parse --abbrev-ref HEAD)"
if [[ "$BRANCH" == "HEAD" ]]; then
  die "Detached HEAD state — check out a branch before pushing."
fi
REMOTE_URL_WITH_CREDS="http://${GITEA_USER}:${GITEA_TOKEN}@${GITEA_HOST}:${PORT}/${GITEA_USER}/${REPO_NAME}.git"
info "Repo: ${REPO_NAME}"
info "Branch: ${BRANCH}"

# ─── 4. Update (or add) the gitea remote ─────────────────────────────────────
# NOTE(review): embedding the token in the remote URL persists it in
# .git/config. Acceptable for a single-user gitignored workspace, but a git
# credential helper would keep the secret off disk — confirm this trade-off.
if git -C "$REPO_ROOT" remote get-url gitea &>/dev/null; then
  git -C "$REPO_ROOT" remote set-url gitea "$REMOTE_URL_WITH_CREDS"
else
  git -C "$REPO_ROOT" remote add gitea "$REMOTE_URL_WITH_CREDS"
  info "Added 'gitea' remote."
fi

# ─── 5. Push ─────────────────────────────────────────────────────────────────
info "Pushing ${BRANCH} → gitea …"
echo ""
if git -C "$REPO_ROOT" push gitea "HEAD:${BRANCH}"; then
  echo ""
  ok "Pushed ${BRANCH} successfully."
  echo ""
  echo " Branch: ${GITEA_BASE}/${GITEA_USER}/${REPO_NAME}/src/branch/${BRANCH}"
  echo " Open PR: ${GITEA_BASE}/${GITEA_USER}/${REPO_NAME}/compare/main...${BRANCH}"
  echo ""
else
  # $? in the else branch is the exit status of the failed push above.
  EXIT_CODE=$?
  echo ""
  die "git push failed (exit ${EXIT_CODE}). See error above."
fi

4
the-matrix/.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
node_modules/
dist/
.DS_Store
*.local

80
the-matrix/README.md Normal file
View File

@@ -0,0 +1,80 @@
# Timmy Tower World
A Three.js 3D visualization of the Timmy agent network. Agents appear as
glowing icosahedra connected by lines, pulsing as they process jobs. A
matrix-rain particle effect fills the background.
## Quick start
```bash
npm install
npm run dev # Vite dev server with hot reload → http://localhost:5173
npm run build # Production bundle → dist/
npm run preview # Serve dist/ locally
```
## Configuration
Set these in a `.env.local` file (not committed):
```
VITE_WS_URL=ws://localhost:8080/ws/agents
```
Leave `VITE_WS_URL` unset to run in offline/demo mode (agents animate but
receive no live updates).
## Adding custom agents
**Edit one file only: `js/agent-defs.js`**
```js
export const AGENT_DEFS = [
// existing agents …
{
id: 'zeta', // unique string — matches WebSocket message agentId
label: 'ZETA', // displayed in the 3D HUD
color: 0xff00aa, // hex integer (0xRRGGBB)
role: 'observer', // shown under the label sprite
direction: 'east', // cardinal facing direction (north/east/south/west)
x: 12, // world-space position (horizontal)
z: 0, // world-space position (depth)
},
];
```
Nothing else needs to change. `agents.js` reads positions from `x`/`z`,
and `websocket.js` reads colors and labels — both derive everything from
`AGENT_DEFS`.
## Architecture
```
js/
├── agent-defs.js ← single source of truth: id, label, color, role, position
├── agents.js ← Three.js scene objects, animation loop
├── effects.js ← matrix rain particles, starfield
├── interaction.js ← OrbitControls (pan, zoom, rotate)
├── main.js ← entry point, rAF loop
├── ui.js          ← DOM HUD overlay (FPS, agent states, chat)
├── websocket.js   ← WebSocket reconnect, message dispatch
└── world.js       ← scene/camera/renderer setup, lights, grid, resize
```
## WebSocket protocol
The backend sends JSON messages on the agents channel:
| `type` | Fields | Effect |
|-----------------|-------------------------------------|-------------------------------|
| `agent_state` | `agentId`, `state` | Update agent visual state |
| `job_started` | `agentId`, `jobId` | Increment job counter, pulse |
| `job_completed` | `agentId`, `jobId` | Decrement job counter |
| `chat` | `agentId`, `text` | Append to chat panel |
Agent states: `idle` (dim pulse) · `active` (bright pulse + fast ring spin)
## Stack
- [Three.js](https://threejs.org) 0.171.0 — 3D rendering
- [Vite](https://vitejs.dev) 5 — build + dev server
- `crypto.randomUUID()` — secure client session IDs (no external library)

110
the-matrix/index.html Normal file
View File

@@ -0,0 +1,110 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>The Matrix</title>
<link rel="manifest" href="/manifest.json" />
<meta name="theme-color" content="#00ff41" />
<!-- iOS PWA -->
<meta name="apple-mobile-web-app-capable" content="yes" />
<meta name="apple-mobile-web-app-status-bar-style" content="black-translucent" />
<meta name="apple-mobile-web-app-title" content="The Matrix" />
<link rel="apple-touch-icon" href="/icons/icon-192.png" />
<style>
* { margin: 0; padding: 0; box-sizing: border-box; }
body { background: #000; overflow: hidden; font-family: 'Courier New', monospace; }
canvas { display: block; }
#ui-overlay {
position: fixed; top: 0; left: 0; width: 100%; height: 100%;
pointer-events: none; z-index: 10;
}
#hud {
position: fixed; top: 16px; left: 16px;
color: #00ff41; font-size: 12px; line-height: 1.6;
text-shadow: 0 0 8px #00ff41;
pointer-events: none;
}
#hud h1 { font-size: 16px; letter-spacing: 4px; margin-bottom: 8px; color: #00ff88; }
#status-panel {
position: fixed; top: 16px; right: 16px;
color: #00ff41; font-size: 11px; line-height: 1.8;
text-shadow: 0 0 6px #00ff41; max-width: 240px;
}
#chat-panel {
position: fixed; bottom: 16px; left: 16px; right: 16px;
max-height: 180px; overflow-y: auto;
color: #00ff41; font-size: 11px; line-height: 1.6;
text-shadow: 0 0 4px #00ff41;
pointer-events: none;
}
.chat-entry { opacity: 0.8; }
.chat-entry .agent-name { color: #00ff88; font-weight: bold; }
.chat-ts { color: #004d18; font-size: 10px; }
#connection-status {
position: fixed; bottom: 16px; right: 16px;
font-size: 11px; color: #555;
pointer-events: none;
}
#connection-status.connected { color: #00ff41; text-shadow: 0 0 6px #00ff41; }
#chat-clear-btn {
position: fixed; bottom: 16px; right: 110px;
font-family: 'Courier New', monospace;
font-size: 10px; color: #004d18;
background: transparent; border: 1px solid #004d18;
padding: 2px 6px; cursor: pointer;
pointer-events: all; z-index: 20;
text-shadow: none;
transition: color 0.2s, border-color 0.2s;
}
#chat-clear-btn:hover { color: #00ff41; border-color: #00ff41; }
/* WebGL context-loss recovery overlay */
#webgl-recovery-overlay {
display: none;
position: fixed; inset: 0; z-index: 200;
background: rgba(0, 0, 0, 0.88);
justify-content: center;
align-items: center;
pointer-events: none;
}
#webgl-recovery-overlay .recovery-text {
color: #00ff41;
font-family: 'Courier New', monospace;
font-size: 16px;
letter-spacing: 3px;
text-shadow: 0 0 18px #00ff41, 0 0 6px #00ff41;
animation: ctx-blink 1.2s step-end infinite;
}
@keyframes ctx-blink {
0%, 100% { opacity: 1; }
50% { opacity: 0.25; }
}
</style>
</head>
<body>
<div id="ui-overlay">
<div id="hud">
<h1>TIMMY TOWER WORLD</h1>
<div id="agent-count">AGENTS: 0</div>
<div id="active-jobs">JOBS: 0</div>
<div id="fps">FPS: --</div>
</div>
<div id="status-panel">
<div id="agent-list"></div>
</div>
<div id="chat-panel"></div>
<div id="connection-status">OFFLINE</div>
</div>
<button id="chat-clear-btn" title="Clear chat history">CLEAR</button>
<div id="webgl-recovery-overlay">
<span class="recovery-text">GPU context lost — recovering...</span>
</div>
<script type="module" src="./js/main.js"></script>
</body>
</html>

View File

@@ -0,0 +1,28 @@
/**
* agent-defs.js — Single source of truth for all agent definitions.
*
* To add a new agent, append one entry to AGENT_DEFS below and pick an
* unused (x, z) position. No other file needs to be edited.
*
* Fields:
* id — unique string key used in WebSocket messages and state maps
* label — display name shown in the 3D HUD and chat panel
* color — hex integer (0xRRGGBB) used for Three.js materials and lights
* role — human-readable role string shown under the label sprite
* direction — cardinal facing direction (for future mesh orientation use)
* x, z — world-space position on the horizontal plane (y is always 0)
*/
export const AGENT_DEFS = [
  // Four core agents arranged in a diamond (±6 on x/z) around the origin —
  // all pairs are within the 8-unit radius that agents.js uses to draw
  // connection lines.
  { id: 'alpha', label: 'ALPHA', color: 0x00ff88, role: 'orchestrator', direction: 'north', x: 0, z: -6 },
  { id: 'beta', label: 'BETA', color: 0x00aaff, role: 'worker', direction: 'east', x: 6, z: 0 },
  { id: 'gamma', label: 'GAMMA', color: 0xff6600, role: 'validator', direction: 'south', x: 0, z: 6 },
  { id: 'delta', label: 'DELTA', color: 0xaa00ff, role: 'monitor', direction: 'west', x: -6, z: 0 },
];
/**
* Convert an integer color (e.g. 0x00ff88) to a CSS hex string ('#00ff88').
* Useful for DOM styling and canvas rendering.
*/
/**
 * Convert an integer color (e.g. 0x00ff88) to a CSS hex string ('#00ff88').
 * Useful for DOM styling and canvas rendering.
 * @param {number} intColor - 0xRRGGBB integer
 * @returns {string} '#rrggbb'
 */
export function colorToCss(intColor) {
  const hex = intColor.toString(16);
  return `#${hex.padStart(6, '0')}`;
}

207
the-matrix/js/agents.js Normal file
View File

@@ -0,0 +1,207 @@
import * as THREE from 'three';
import { AGENT_DEFS, colorToCss } from './agent-defs.js';
const agents = new Map();
let scene;
let connectionLines = [];
/**
 * One visual agent node: glowing icosahedron core, flat spinning torus ring,
 * back-side glow shell, a point light, and a canvas-texture label sprite.
 * Owns every GPU resource it creates; dispose() releases them all.
 */
class Agent {
  constructor(def) {
    this.id = def.id;
    this.label = def.label;
    this.color = def.color;
    this.role = def.role;
    this.position = new THREE.Vector3(def.x, 0, def.z);
    this.state = 'idle';
    // Random phase offset so agents pulse/bob out of sync with each other.
    this.pulsePhase = Math.random() * Math.PI * 2;
    this.group = new THREE.Group();
    this.group.position.copy(this.position);
    this._buildMeshes();
    this._buildLabel();
  }
  // Create the core/ring/glow meshes plus the point light, all parented to this.group.
  _buildMeshes() {
    const mat = new THREE.MeshStandardMaterial({
      color: this.color,
      emissive: this.color,
      emissiveIntensity: 0.4,
      roughness: 0.3,
      metalness: 0.8,
    });
    const geo = new THREE.IcosahedronGeometry(0.7, 1);
    this.core = new THREE.Mesh(geo, mat);
    this.group.add(this.core);
    const ringGeo = new THREE.TorusGeometry(1.1, 0.04, 8, 32);
    const ringMat = new THREE.MeshBasicMaterial({ color: this.color, transparent: true, opacity: 0.5 });
    this.ring = new THREE.Mesh(ringGeo, ringMat);
    this.ring.rotation.x = Math.PI / 2; // lay the torus flat (horizontal halo)
    this.group.add(this.ring);
    // Slightly larger BackSide sphere reads as a soft outer glow around the core.
    const glowGeo = new THREE.SphereGeometry(1.3, 16, 16);
    const glowMat = new THREE.MeshBasicMaterial({
      color: this.color,
      transparent: true,
      opacity: 0.05,
      side: THREE.BackSide,
    });
    this.glow = new THREE.Mesh(glowGeo, glowMat);
    this.group.add(this.glow);
    const light = new THREE.PointLight(this.color, 1.5, 10);
    this.group.add(light);
    this.light = light;
  }
  // Render the label + role onto a small canvas and show it as a billboard sprite.
  _buildLabel() {
    const canvas = document.createElement('canvas');
    canvas.width = 256; canvas.height = 64;
    const ctx = canvas.getContext('2d');
    ctx.fillStyle = 'rgba(0,0,0,0)';
    ctx.fillRect(0, 0, 256, 64);
    ctx.font = 'bold 22px Courier New';
    ctx.fillStyle = colorToCss(this.color);
    ctx.textAlign = 'center';
    ctx.fillText(this.label, 128, 28);
    ctx.font = '14px Courier New';
    ctx.fillStyle = '#007722';
    ctx.fillText(this.role.toUpperCase(), 128, 50);
    const tex = new THREE.CanvasTexture(canvas);
    const spriteMat = new THREE.SpriteMaterial({ map: tex, transparent: true });
    this.sprite = new THREE.Sprite(spriteMat);
    this.sprite.scale.set(2.4, 0.6, 1);
    this.sprite.position.y = 2; // float the label above the core
    this.group.add(this.sprite);
  }
  /**
   * Per-frame animation: pulse emissive/light intensity and core scale,
   * spin the ring (faster when active), and bob the whole group vertically.
   * @param {number} time - high-resolution timestamp in ms (performance.now())
   */
  update(time) {
    const pulse = Math.sin(time * 0.002 + this.pulsePhase);
    const active = this.state === 'active';
    const intensity = active ? 0.6 + pulse * 0.4 : 0.2 + pulse * 0.1;
    this.core.material.emissiveIntensity = intensity;
    this.light.intensity = active ? 2 + pulse : 0.8 + pulse * 0.3;
    const scale = active ? 1 + pulse * 0.08 : 1 + pulse * 0.03;
    this.core.scale.setScalar(scale);
    this.ring.rotation.y += active ? 0.03 : 0.008;
    this.ring.material.opacity = 0.3 + pulse * 0.2;
    this.group.position.y = this.position.y + Math.sin(time * 0.001 + this.pulsePhase) * 0.15;
  }
  // Set the runtime state ('idle' | 'active'); visuals react on the next update().
  setState(state) {
    this.state = state;
  }
  // Release all GPU-side resources (geometries, materials, label texture).
  dispose() {
    this.core.geometry.dispose();
    this.core.material.dispose();
    this.ring.geometry.dispose();
    this.ring.material.dispose();
    this.glow.geometry.dispose();
    this.glow.material.dispose();
    if (this.sprite.material.map) this.sprite.material.map.dispose();
    this.sprite.material.dispose();
  }
}
/**
 * Create one Agent per AGENT_DEFS entry, add it to the scene, and wire the
 * faint connection lines between nearby agents.
 * @param {THREE.Scene} sceneRef
 */
export function initAgents(sceneRef) {
  scene = sceneRef;
  for (const def of AGENT_DEFS) {
    const agent = new Agent(def);
    agents.set(def.id, agent);
    scene.add(agent.group);
  }
  buildConnectionLines();
}

/**
 * Rebuild the dim green lines linking every pair of agents within 8 world
 * units of each other. Each line gets its own material clone so it can be
 * disposed independently in disposeAgents().
 */
function buildConnectionLines() {
  for (const line of connectionLines) scene.remove(line);
  connectionLines = [];
  const agentList = [...agents.values()];
  const lineMat = new THREE.LineBasicMaterial({
    color: 0x003300,
    transparent: true,
    opacity: 0.4,
  });
  for (let i = 0; i < agentList.length; i++) {
    for (let j = i + 1; j < agentList.length; j++) {
      const a = agentList[i];
      const b = agentList[j];
      if (a.position.distanceTo(b.position) > 8) continue;
      const geo = new THREE.BufferGeometry().setFromPoints([
        a.position.clone(),
        b.position.clone(),
      ]);
      const line = new THREE.Line(geo, lineMat.clone());
      connectionLines.push(line);
      scene.add(line);
    }
  }
}
/** Advance every agent's pulse/spin animation for this frame. */
export function updateAgents(time) {
  for (const agent of agents.values()) agent.update(time);
}

/** @returns {number} number of live agents in the scene */
export function getAgentCount() {
  return agents.size;
}

/** Set one agent's runtime state ('idle' | 'active'); unknown ids are ignored. */
export function setAgentState(agentId, state) {
  agents.get(agentId)?.setState(state);
}

/** Snapshot of display metadata plus current state for every agent (HUD use). */
export function getAgentDefs() {
  return [...agents.values()].map(({ id, label, role, color, state }) => ({
    id, label, role, color, state,
  }));
}

/**
 * Return a snapshot of each agent's current runtime state.
 * Call before teardown so the state can be reapplied after reinit.
 * @returns {Object.<string, string>} — e.g. { alpha: 'active', beta: 'idle' }
 */
export function getAgentStates() {
  return Object.fromEntries(
    [...agents.entries()].map(([id, agent]) => [id, agent.state])
  );
}

/**
 * Apply a previously captured state snapshot to freshly-created agents.
 * Call immediately after initAgents() during context-restore reinit.
 * @param {Object.<string, string>} snapshot
 */
export function applyAgentStates(snapshot) {
  if (!snapshot) return;
  for (const [id, state] of Object.entries(snapshot)) {
    agents.get(id)?.setState(state);
  }
}

/**
 * Dispose all agent GPU resources (geometries, materials, textures) and the
 * connection lines. Called before context-loss teardown.
 */
export function disposeAgents() {
  for (const agent of agents.values()) agent.dispose();
  agents.clear();
  for (const line of connectionLines) {
    line.geometry.dispose();
    line.material.dispose();
  }
  connectionLines = [];
  scene = null;
}

99
the-matrix/js/effects.js vendored Normal file
View File

@@ -0,0 +1,99 @@
import * as THREE from 'three';
let rainParticles;
let rainPositions;
let rainVelocities;
const RAIN_COUNT = 2000;
/** Create all background effects: matrix rain plus the static starfield. */
export function initEffects(scene) {
  initMatrixRain(scene);
  initStarfield(scene);
}

/**
 * Build the falling green "matrix rain" particle cloud and stash its
 * position/velocity buffers for per-frame animation in updateEffects().
 */
function initMatrixRain(scene) {
  const positions = new Float32Array(RAIN_COUNT * 3);
  const velocities = new Float32Array(RAIN_COUNT);
  const colors = new Float32Array(RAIN_COUNT * 3);
  for (let i = 0; i < RAIN_COUNT; i++) {
    const base = i * 3;
    positions[base] = (Math.random() - 0.5) * 100;
    positions[base + 1] = Math.random() * 50 + 5;
    positions[base + 2] = (Math.random() - 0.5) * 100;
    velocities[i] = 0.05 + Math.random() * 0.15;
    // Green channel only, at a random per-drop brightness.
    colors[base] = 0;
    colors[base + 1] = 0.3 + Math.random() * 0.7;
    colors[base + 2] = 0;
  }
  const geo = new THREE.BufferGeometry();
  geo.setAttribute('position', new THREE.BufferAttribute(positions, 3));
  geo.setAttribute('color', new THREE.BufferAttribute(colors, 3));
  rainPositions = positions;
  rainVelocities = velocities;
  const mat = new THREE.PointsMaterial({
    size: 0.12,
    vertexColors: true,
    transparent: true,
    opacity: 0.7,
    sizeAttenuation: true,
  });
  rainParticles = new THREE.Points(geo, mat);
  scene.add(rainParticles);
}

/** Static dim-green stars scattered far out; never animated after creation. */
function initStarfield(scene) {
  const STAR_COUNT = 500;
  const positions = new Float32Array(STAR_COUNT * 3);
  for (let i = 0; i < STAR_COUNT; i++) {
    const base = i * 3;
    positions[base] = (Math.random() - 0.5) * 300;
    positions[base + 1] = Math.random() * 80 + 10;
    positions[base + 2] = (Math.random() - 0.5) * 300;
  }
  const geo = new THREE.BufferGeometry();
  geo.setAttribute('position', new THREE.BufferAttribute(positions, 3));
  const mat = new THREE.PointsMaterial({
    color: 0x003300,
    size: 0.08,
    transparent: true,
    opacity: 0.5,
  });
  scene.add(new THREE.Points(geo, mat));
}
/**
 * Advance the rain one frame: move each drop down by its velocity and
 * respawn it at a fresh random X/Z above the scene once it falls below -1.
 */
export function updateEffects(_time) {
  if (!rainParticles) return;
  for (let i = 0; i < RAIN_COUNT; i++) {
    const base = i * 3;
    rainPositions[base + 1] -= rainVelocities[i];
    if (rainPositions[base + 1] < -1) {
      rainPositions[base + 1] = 40 + Math.random() * 20;
      rainPositions[base] = (Math.random() - 0.5) * 100;
      rainPositions[base + 2] = (Math.random() - 0.5) * 100;
    }
  }
  rainParticles.geometry.attributes.position.needsUpdate = true;
}

/**
 * Release GPU resources held by the rain particles and drop the CPU-side
 * buffers. Called before context-loss teardown.
 */
export function disposeEffects() {
  if (rainParticles) {
    rainParticles.geometry.dispose();
    rainParticles.material.dispose();
    rainParticles = null;
  }
  rainPositions = null;
  rainVelocities = null;
}

View File

@@ -0,0 +1,39 @@
import { OrbitControls } from 'three/addons/controls/OrbitControls.js';
let controls;
let _canvas;
const _noCtxMenu = e => e.preventDefault();
/**
 * Attach OrbitControls (pan / zoom / rotate) to the renderer canvas and
 * suppress the browser context menu so right-drag panning works.
 */
export function initInteraction(camera, renderer) {
  _canvas = renderer.domElement;
  _canvas.addEventListener('contextmenu', _noCtxMenu);
  controls = new OrbitControls(camera, _canvas);
  controls.enableDamping = true;
  controls.dampingFactor = 0.05;
  controls.screenSpacePanning = false;
  controls.minDistance = 5;
  controls.maxDistance = 80;
  controls.maxPolarAngle = Math.PI / 2.1;
  controls.target.set(0, 0, 0);
  controls.update();
}

/** Per-frame damping update; safe no-op before initInteraction(). */
export function updateControls() {
  controls?.update();
}

/**
 * Dispose OrbitControls event listeners and the context-menu suppressor.
 * Called before context-loss teardown.
 */
export function disposeInteraction() {
  if (_canvas) {
    _canvas.removeEventListener('contextmenu', _noCtxMenu);
    _canvas = null;
  }
  if (controls) {
    controls.dispose();
    controls = null;
  }
}

113
the-matrix/js/main.js Normal file
View File

@@ -0,0 +1,113 @@
import { initWorld, onWindowResize, disposeWorld } from './world.js';
import {
  initAgents, updateAgents, getAgentCount,
  disposeAgents, getAgentStates, applyAgentStates,
} from './agents.js';
import { initEffects, updateEffects, disposeEffects } from './effects.js';
import { initUI, updateUI } from './ui.js';
import { initInteraction, updateControls, disposeInteraction } from './interaction.js';
import { initWebSocket, getConnectionState, getJobCount } from './websocket.js';
let running = false;
let canvas = null;
/**
 * Build (or rebuild) the Three.js world.
 *
 * @param {boolean} firstInit
 *   true  — first page load: also starts UI and WebSocket
 *   false — context-restore reinit: skips UI/WS (they survive context loss)
 * @param {Object.<string,string>|null} stateSnapshot
 *   Agent state map captured just before teardown; reapplied after initAgents.
 * @returns {{scene, renderer, ac: AbortController}} handle consumed by teardown()
 */
function buildWorld(firstInit, stateSnapshot) {
  // On reinit the saved canvas is passed in so Three.js reuses the same DOM node.
  const { scene, camera, renderer } = initWorld(canvas);
  canvas = renderer.domElement;
  initEffects(scene);
  initAgents(scene);
  if (stateSnapshot) {
    applyAgentStates(stateSnapshot);
  }
  initInteraction(camera, renderer);
  if (firstInit) {
    initUI();
    initWebSocket(scene);
  }
  // AbortController lets teardown() remove the resize listener in one call.
  const ac = new AbortController();
  window.addEventListener('resize', () => onWindowResize(camera, renderer), { signal: ac.signal });
  let frameCount = 0;
  let lastFpsTime = performance.now();
  let currentFps = 0;
  running = true;
  function animate() {
    if (!running) return; // stops the loop after teardown / context loss
    requestAnimationFrame(animate);
    const now = performance.now();
    frameCount++;
    // Recompute FPS once per second from the accumulated frame count.
    if (now - lastFpsTime >= 1000) {
      currentFps = Math.round(frameCount * 1000 / (now - lastFpsTime));
      frameCount = 0;
      lastFpsTime = now;
    }
    updateEffects(now);
    updateAgents(now);
    // BUGFIX: OrbitControls has enableDamping=true, which requires its
    // update() to run every frame — previously it was never called, so
    // camera damping/inertia never applied.
    updateControls();
    updateUI({
      fps: currentFps,
      agentCount: getAgentCount(),
      jobCount: getJobCount(),
      connectionState: getConnectionState(),
    });
    renderer.render(scene, camera);
  }
  animate();
  return { scene, renderer, ac };
}
/**
 * Stop the rAF loop and release everything buildWorld created, in reverse
 * dependency order: resize listener → controls → effects → agents → renderer.
 */
function teardown({ scene, renderer, ac }) {
  running = false; // the next queued animate() call bails out immediately
  ac.abort();      // removes the window resize listener
  disposeInteraction();
  disposeEffects();
  disposeAgents();
  disposeWorld(renderer, scene);
}
/**
 * Entry point: build the world once, then handle WebGL context loss by
 * tearing everything down and rebuilding with agent states preserved.
 */
function main() {
  const $overlay = document.getElementById('webgl-recovery-overlay');
  let handle = buildWorld(true, null);
  canvas.addEventListener('webglcontextlost', event => {
    // preventDefault() signals the browser we intend to restore the context.
    event.preventDefault();
    running = false;
    if ($overlay) $overlay.style.display = 'flex';
  });
  canvas.addEventListener('webglcontextrestored', () => {
    // Capture agent states BEFORE teardown destroys the Agent objects.
    const snapshot = getAgentStates();
    teardown(handle);
    handle = buildWorld(false, snapshot);
    if ($overlay) $overlay.style.display = 'none';
  });
}
main();
// Register the PWA service worker in production builds only; failures are
// non-fatal (the app simply runs without offline caching).
if (import.meta.env.PROD && 'serviceWorker' in navigator) {
  window.addEventListener('load', () => {
    navigator.serviceWorker.register('/sw.js').catch(() => {});
  });
}

178
the-matrix/js/ui.js Normal file
View File

@@ -0,0 +1,178 @@
import { getAgentDefs } from './agents.js';
import { AGENT_DEFS, colorToCss } from './agent-defs.js';
const $agentCount = document.getElementById('agent-count');
const $activeJobs = document.getElementById('active-jobs');
const $fps = document.getElementById('fps');
const $agentList = document.getElementById('agent-list');
const $connStatus = document.getElementById('connection-status');
const $chatPanel = document.getElementById('chat-panel');
const $clearBtn = document.getElementById('chat-clear-btn');
const MAX_CHAT_ENTRIES = 12;
const MAX_STORED = 100;
const STORAGE_PREFIX = 'matrix:chat:';
const chatEntries = [];
const chatHistory = {};
let uiInitialized = false;
/** Build the localStorage key for one agent's chat history. */
function storageKey(agentId) {
  return `${STORAGE_PREFIX}${agentId}`;
}

/**
 * Load one agent's persisted chat history from localStorage.
 * Returns [] on missing, corrupt, or non-array data — never throws.
 * @param {string} agentId
 * @returns {Array<{agentLabel: string, text: string}>}
 */
export function loadChatHistory(agentId) {
  let parsed;
  try {
    const raw = localStorage.getItem(storageKey(agentId));
    if (!raw) return [];
    parsed = JSON.parse(raw);
  } catch {
    return [];
  }
  if (!Array.isArray(parsed)) return [];
  return parsed.filter(
    (m) => m && typeof m.agentLabel === 'string' && typeof m.text === 'string'
  );
}

/**
 * Persist the most recent MAX_STORED messages for one agent.
 * Storage failures (quota, private browsing) are deliberately ignored.
 */
export function saveChatHistory(agentId, messages) {
  try {
    const recent = messages.slice(-MAX_STORED);
    localStorage.setItem(storageKey(agentId), JSON.stringify(recent));
  } catch {
    // Best-effort persistence only.
  }
}
/**
 * Format an epoch-ms timestamp as local-time 'HH:MM' with zero padding.
 * @param {number} ts - epoch milliseconds
 * @returns {string}
 */
function formatTimestamp(ts) {
  const date = new Date(ts);
  const pad = (v) => String(v).padStart(2, '0');
  return `${pad(date.getHours())}:${pad(date.getMinutes())}`;
}
/**
 * Build one chat-panel DOM entry.
 * BUGFIX: the label is now HTML-escaped like the message — previously only
 * the message went through escapeHtml(), so a label containing markup would
 * have been injected raw into innerHTML. Chat payloads arrive over the
 * WebSocket and must never be trusted.
 * @param {string} agentLabel - display name
 * @param {string} message - raw text
 * @param {string} cssColor - CSS color string; falls back to matrix green
 * @param {number} [timestamp] - epoch ms; omitted → no [hh:mm] prefix
 * @returns {HTMLDivElement}
 */
function buildChatEntry(agentLabel, message, cssColor, timestamp) {
  const color = cssColor || '#00ff41';
  const entry = document.createElement('div');
  entry.className = 'chat-entry';
  const ts = timestamp ? `<span class="chat-ts">[${formatTimestamp(timestamp)}]</span> ` : '';
  entry.innerHTML = `${ts}<span class="agent-name" style="color:${color}">${escapeHtml(agentLabel)}</span>: ${escapeHtml(message)}`;
  return entry;
}
/**
 * Load every agent's persisted history (plus the 'sys' channel), then render
 * the MAX_CHAT_ENTRIES most recent messages across all channels in
 * timestamp order.
 */
function loadAllHistories() {
  const agentIds = [...AGENT_DEFS.map((d) => d.id), 'sys'];
  const all = [];
  for (const id of agentIds) {
    const msgs = loadChatHistory(id);
    chatHistory[id] = msgs;
    all.push(...msgs);
  }
  all.sort((a, b) => (a.timestamp || 0) - (b.timestamp || 0));
  for (const msg of all.slice(-MAX_CHAT_ENTRIES)) {
    const entry = buildChatEntry(msg.agentLabel, msg.text, msg.cssColor, msg.timestamp);
    chatEntries.push(entry);
    $chatPanel.appendChild(entry);
  }
  $chatPanel.scrollTop = $chatPanel.scrollHeight;
}

/** Wipe persisted history for every channel and empty the chat panel DOM. */
function clearAllHistories() {
  for (const id of [...AGENT_DEFS.map((d) => d.id), 'sys']) {
    localStorage.removeItem(storageKey(id));
    chatHistory[id] = [];
  }
  while ($chatPanel.firstChild) $chatPanel.removeChild($chatPanel.firstChild);
  chatEntries.length = 0;
}
/** One-time UI setup: agent roster, persisted chat, clear button. Idempotent. */
export function initUI() {
  if (uiInitialized) return;
  uiInitialized = true;
  renderAgentList();
  loadAllHistories();
  $clearBtn?.addEventListener('click', clearAllHistories);
}

/** Render the static right-hand agent roster; state spans update per frame. */
function renderAgentList() {
  const rows = getAgentDefs().map((a) => {
    const css = colorToCss(a.color);
    return `<div class="agent-row">
      <span class="label">[</span>
      <span style="color:${css}">${a.label}</span>
      <span class="label">]</span>
      <span id="agent-state-${a.id}" style="color:#003300"> IDLE</span>
    </div>`;
  });
  $agentList.innerHTML = rows.join('');
}
/** Refresh HUD counters, the connection badge, and per-agent state colors. */
export function updateUI({ fps, agentCount, jobCount, connectionState }) {
  $fps.textContent = `FPS: ${fps}`;
  $agentCount.textContent = `AGENTS: ${agentCount}`;
  $activeJobs.textContent = `JOBS: ${jobCount}`;
  if (connectionState === 'connected') {
    $connStatus.textContent = '● CONNECTED';
    $connStatus.className = 'connected';
  } else {
    $connStatus.textContent =
      connectionState === 'connecting' ? '◌ CONNECTING...' : '○ OFFLINE';
    $connStatus.className = '';
  }
  for (const a of getAgentDefs()) {
    const el = document.getElementById(`agent-state-${a.id}`);
    if (!el) continue;
    el.textContent = ` ${a.state.toUpperCase()}`;
    el.style.color = a.state === 'active' ? '#00ff41' : '#003300';
  }
}
/**
 * Append a message to the chat panel and optionally persist it.
 * Keeps at most MAX_CHAT_ENTRIES DOM entries and MAX_STORED stored messages.
 * @param {string} agentLabel — display name
 * @param {string} message — raw text (HTML-escaped before insertion)
 * @param {string} cssColor — CSS color string e.g. '#00ff88'
 * @param {string} [agentId] — storage key; omit to skip persistence
 */
export function appendChatMessage(agentLabel, message, cssColor, agentId) {
  const timestamp = Date.now();
  const entry = buildChatEntry(agentLabel, message, cssColor, timestamp);
  chatEntries.push(entry);
  // Evict the oldest DOM entry once the panel is full.
  if (chatEntries.length > MAX_CHAT_ENTRIES) {
    $chatPanel.removeChild(chatEntries.shift());
  }
  $chatPanel.appendChild(entry);
  $chatPanel.scrollTop = $chatPanel.scrollHeight;
  if (!agentId) return;
  if (!chatHistory[agentId]) chatHistory[agentId] = [];
  chatHistory[agentId].push({ agentLabel, text: message, cssColor, agentId, timestamp });
  if (chatHistory[agentId].length > MAX_STORED) {
    chatHistory[agentId] = chatHistory[agentId].slice(-MAX_STORED);
  }
  saveChatHistory(agentId, chatHistory[agentId]);
}
/**
 * Escape characters with special meaning in HTML so untrusted text (chat
 * arrives over the WebSocket) can be interpolated into innerHTML safely.
 * '&' is replaced first so entities are not double-escaped; quotes are now
 * escaped too so the output is also safe inside attribute values (the
 * original only covered & < >, which is unsafe in attribute context).
 * Rendered text is unchanged: browsers display &quot;/&#39; as "/'.
 * @param {string} str - raw untrusted text
 * @returns {string} HTML-safe text
 */
function escapeHtml(str) {
  return str
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#39;');
}

115
the-matrix/js/websocket.js Normal file
View File

@@ -0,0 +1,115 @@
import { AGENT_DEFS, colorToCss } from './agent-defs.js';
import { setAgentState } from './agents.js';
import { appendChatMessage } from './ui.js';
const WS_URL = import.meta.env.VITE_WS_URL || '';
const agentById = Object.fromEntries(AGENT_DEFS.map(d => [d.id, d]));
let ws = null;
let connectionState = 'disconnected';
let jobCount = 0;
let reconnectTimer = null;
const RECONNECT_DELAY_MS = 5000;
/**
 * Start the live connection, or stay in offline/demo mode when no
 * VITE_WS_URL is configured.
 */
export function initWebSocket(_scene) {
  if (WS_URL) {
    connect();
  } else {
    connectionState = 'disconnected';
  }
}
/**
 * Open (or re-open) the WebSocket and install all lifecycle handlers.
 * Any previous socket is closed with its onclose handler removed first so
 * the stale close event cannot schedule a second, duplicate reconnect.
 */
function connect() {
  if (ws) {
    ws.onclose = null; // prevent the old socket's close from re-triggering reconnect
    ws.close();
  }
  connectionState = 'connecting';
  try {
    ws = new WebSocket(WS_URL);
  } catch {
    // The constructor throws on malformed URLs — treat like a dropped connection.
    connectionState = 'disconnected';
    scheduleReconnect();
    return;
  }
  ws.onopen = () => {
    connectionState = 'connected';
    clearTimeout(reconnectTimer);
    // Subscribe to the agents channel with a fresh, unguessable client id.
    ws.send(JSON.stringify({
      type: 'subscribe',
      channel: 'agents',
      clientId: crypto.randomUUID(),
    }));
  };
  ws.onmessage = (event) => {
    try {
      handleMessage(JSON.parse(event.data));
    } catch {
      // Malformed frames are ignored; the stream keeps flowing.
    }
  };
  ws.onerror = () => {
    connectionState = 'disconnected';
  };
  ws.onclose = () => {
    // onclose also fires after onerror, so reconnect is scheduled only here.
    connectionState = 'disconnected';
    scheduleReconnect();
  };
}
/** (Re)arm the reconnect timer, collapsing any pending attempt into one. */
function scheduleReconnect() {
  clearTimeout(reconnectTimer);
  reconnectTimer = setTimeout(connect, RECONNECT_DELAY_MS);
}

/**
 * Dispatch one parsed WebSocket message to the scene and HUD.
 * Unknown message types are ignored on purpose (forward compatibility).
 * @param {{type: string}} msg
 */
function handleMessage(msg) {
  const { type } = msg;
  if (type === 'agent_state') {
    if (msg.agentId && msg.state) {
      setAgentState(msg.agentId, msg.state);
    }
    return;
  }
  if (type === 'job_started') {
    jobCount++;
    if (msg.agentId) setAgentState(msg.agentId, 'active');
    logEvent(`JOB ${(msg.jobId || '').slice(0, 8)} started`);
    return;
  }
  if (type === 'job_completed') {
    if (jobCount > 0) jobCount--;
    if (msg.agentId) setAgentState(msg.agentId, 'idle');
    logEvent(`JOB ${(msg.jobId || '').slice(0, 8)} completed`);
    return;
  }
  if (type === 'chat') {
    const def = agentById[msg.agentId];
    if (def && msg.text) {
      appendChatMessage(def.label, msg.text, colorToCss(def.color), def.id);
    }
  }
  // 'agent_count' and anything unrecognized: deliberate no-op.
}
function logEvent(text) {
appendChatMessage('SYS', text, colorToCss(0x003300), 'sys');
}
export function getConnectionState() {
return connectionState;
}
export function getJobCount() {
return jobCount;
}

95
the-matrix/js/world.js Normal file
View File

@@ -0,0 +1,95 @@
import * as THREE from 'three';
let scene, camera, renderer;
const _worldObjects = [];
/**
 * Create the scene, camera, and renderer (module-level singletons) plus
 * lights and the ground grid, and return them for the caller's render loop.
 *
 * @param {HTMLCanvasElement|null} existingCanvas — pass the saved canvas on
 *   re-init so Three.js reuses the same DOM element instead of creating a new one
 * @returns {{scene, camera, renderer}}
 */
export function initWorld(existingCanvas) {
  _worldObjects.length = 0; // forget disposables from any previous world
  scene = new THREE.Scene();
  scene.background = new THREE.Color(0x000000);
  scene.fog = new THREE.FogExp2(0x000000, 0.035);
  camera = new THREE.PerspectiveCamera(60, window.innerWidth / window.innerHeight, 0.1, 500);
  camera.position.set(0, 12, 28);
  camera.lookAt(0, 0, 0);
  renderer = new THREE.WebGLRenderer({
    antialias: true,
    canvas: existingCanvas || undefined,
  });
  renderer.setSize(window.innerWidth, window.innerHeight);
  // Cap pixel ratio at 2 to bound fill-rate cost on high-DPI displays.
  renderer.setPixelRatio(Math.min(window.devicePixelRatio, 2));
  renderer.outputColorSpace = THREE.SRGBColorSpace;
  if (!existingCanvas) {
    // First init only: a brand-new canvas must be inserted into the DOM.
    document.body.prepend(renderer.domElement);
  }
  addLights(scene);
  addGrid(scene);
  return { scene, camera, renderer };
}
/**
 * Dispose only world-owned geometries, materials, and the renderer.
 * Agent and effect objects are disposed by their own modules before this runs.
 */
export function disposeWorld(renderer, _scene) {
  for (const obj of _worldObjects) {
    obj.geometry?.dispose();
    if (obj.material) {
      const mats = Array.isArray(obj.material) ? obj.material : [obj.material];
      for (const m of mats) {
        m.map?.dispose();
        m.dispose();
      }
    }
  }
  _worldObjects.length = 0;
  renderer.dispose();
}

/** Ambient base + overhead matrix-green point light + dim directional fill. */
function addLights(scene) {
  scene.add(new THREE.AmbientLight(0x001a00, 0.6));
  const point = new THREE.PointLight(0x00ff41, 2, 80);
  point.position.set(0, 20, 0);
  scene.add(point);
  const fill = new THREE.DirectionalLight(0x003300, 0.4);
  fill.position.set(-10, 10, 10);
  scene.add(fill);
}

/** Ground grid plus a faint dark plane just beneath it; both registered for disposal. */
function addGrid(scene) {
  const grid = new THREE.GridHelper(100, 40, 0x003300, 0x001a00);
  grid.position.y = -0.01;
  scene.add(grid);
  _worldObjects.push(grid);
  const planeMat = new THREE.MeshBasicMaterial({
    color: 0x000a00,
    transparent: true,
    opacity: 0.5,
  });
  const plane = new THREE.Mesh(new THREE.PlaneGeometry(100, 100), planeMat);
  plane.rotation.x = -Math.PI / 2;
  plane.position.y = -0.02;
  scene.add(plane);
  _worldObjects.push(plane);
}

/** Keep camera aspect and drawing-buffer size in sync with the window. */
export function onWindowResize(camera, renderer) {
  camera.aspect = window.innerWidth / window.innerHeight;
  camera.updateProjectionMatrix();
  renderer.setSize(window.innerWidth, window.innerHeight);
}

996
the-matrix/package-lock.json generated Normal file
View File

@@ -0,0 +1,996 @@
{
"name": "the-matrix",
"version": "0.1.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "the-matrix",
"version": "0.1.0",
"dependencies": {
"three": "0.171.0"
},
"devDependencies": {
"vite": "^5.4.0"
}
},
"node_modules/@esbuild/aix-ppc64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz",
"integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"aix"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/android-arm": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz",
"integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/android-arm64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz",
"integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/android-x64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz",
"integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/darwin-arm64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz",
"integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/darwin-x64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz",
"integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/freebsd-arm64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz",
"integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/freebsd-x64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz",
"integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/linux-arm": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz",
"integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/linux-arm64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz",
"integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/linux-ia32": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz",
"integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/linux-loong64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz",
"integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==",
"cpu": [
"loong64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/linux-mips64el": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz",
"integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==",
"cpu": [
"mips64el"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/linux-ppc64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz",
"integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/linux-riscv64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz",
"integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==",
"cpu": [
"riscv64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/linux-s390x": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz",
"integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==",
"cpu": [
"s390x"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/linux-x64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz",
"integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/netbsd-x64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz",
"integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/openbsd-x64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz",
"integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/sunos-x64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz",
"integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"sunos"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/win32-arm64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz",
"integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/win32-ia32": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz",
"integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/win32-x64": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz",
"integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@rollup/rollup-android-arm-eabi": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.59.0.tgz",
"integrity": "sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
]
},
"node_modules/@rollup/rollup-android-arm64": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.59.0.tgz",
"integrity": "sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
]
},
"node_modules/@rollup/rollup-darwin-arm64": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.59.0.tgz",
"integrity": "sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@rollup/rollup-darwin-x64": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.59.0.tgz",
"integrity": "sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@rollup/rollup-freebsd-arm64": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.59.0.tgz",
"integrity": "sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
]
},
"node_modules/@rollup/rollup-freebsd-x64": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.59.0.tgz",
"integrity": "sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
]
},
"node_modules/@rollup/rollup-linux-arm-gnueabihf": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.59.0.tgz",
"integrity": "sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-arm-musleabihf": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.59.0.tgz",
"integrity": "sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-arm64-gnu": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.59.0.tgz",
"integrity": "sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-arm64-musl": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.59.0.tgz",
"integrity": "sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-loong64-gnu": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.59.0.tgz",
"integrity": "sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==",
"cpu": [
"loong64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-loong64-musl": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.59.0.tgz",
"integrity": "sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==",
"cpu": [
"loong64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-ppc64-gnu": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.59.0.tgz",
"integrity": "sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-ppc64-musl": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.59.0.tgz",
"integrity": "sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-riscv64-gnu": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.59.0.tgz",
"integrity": "sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==",
"cpu": [
"riscv64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-riscv64-musl": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.59.0.tgz",
"integrity": "sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==",
"cpu": [
"riscv64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-s390x-gnu": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.59.0.tgz",
"integrity": "sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==",
"cpu": [
"s390x"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-x64-gnu": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.59.0.tgz",
"integrity": "sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-x64-musl": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.59.0.tgz",
"integrity": "sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-openbsd-x64": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.59.0.tgz",
"integrity": "sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
]
},
"node_modules/@rollup/rollup-openharmony-arm64": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.59.0.tgz",
"integrity": "sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openharmony"
]
},
"node_modules/@rollup/rollup-win32-arm64-msvc": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.59.0.tgz",
"integrity": "sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@rollup/rollup-win32-ia32-msvc": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.59.0.tgz",
"integrity": "sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@rollup/rollup-win32-x64-gnu": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.59.0.tgz",
"integrity": "sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@rollup/rollup-win32-x64-msvc": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.59.0.tgz",
"integrity": "sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@types/estree": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz",
"integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==",
"dev": true,
"license": "MIT"
},
"node_modules/esbuild": {
"version": "0.21.5",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz",
"integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"bin": {
"esbuild": "bin/esbuild"
},
"engines": {
"node": ">=12"
},
"optionalDependencies": {
"@esbuild/aix-ppc64": "0.21.5",
"@esbuild/android-arm": "0.21.5",
"@esbuild/android-arm64": "0.21.5",
"@esbuild/android-x64": "0.21.5",
"@esbuild/darwin-arm64": "0.21.5",
"@esbuild/darwin-x64": "0.21.5",
"@esbuild/freebsd-arm64": "0.21.5",
"@esbuild/freebsd-x64": "0.21.5",
"@esbuild/linux-arm": "0.21.5",
"@esbuild/linux-arm64": "0.21.5",
"@esbuild/linux-ia32": "0.21.5",
"@esbuild/linux-loong64": "0.21.5",
"@esbuild/linux-mips64el": "0.21.5",
"@esbuild/linux-ppc64": "0.21.5",
"@esbuild/linux-riscv64": "0.21.5",
"@esbuild/linux-s390x": "0.21.5",
"@esbuild/linux-x64": "0.21.5",
"@esbuild/netbsd-x64": "0.21.5",
"@esbuild/openbsd-x64": "0.21.5",
"@esbuild/sunos-x64": "0.21.5",
"@esbuild/win32-arm64": "0.21.5",
"@esbuild/win32-ia32": "0.21.5",
"@esbuild/win32-x64": "0.21.5"
}
},
"node_modules/fsevents": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/nanoid": {
"version": "3.3.11",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz",
"integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"bin": {
"nanoid": "bin/nanoid.cjs"
},
"engines": {
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
}
},
"node_modules/picocolors": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
"dev": true,
"license": "ISC"
},
"node_modules/postcss": {
"version": "8.5.8",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.8.tgz",
"integrity": "sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/postcss/"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/postcss"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"nanoid": "^3.3.11",
"picocolors": "^1.1.1",
"source-map-js": "^1.2.1"
},
"engines": {
"node": "^10 || ^12 || >=14"
}
},
"node_modules/rollup": {
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.59.0.tgz",
"integrity": "sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/estree": "1.0.8"
},
"bin": {
"rollup": "dist/bin/rollup"
},
"engines": {
"node": ">=18.0.0",
"npm": ">=8.0.0"
},
"optionalDependencies": {
"@rollup/rollup-android-arm-eabi": "4.59.0",
"@rollup/rollup-android-arm64": "4.59.0",
"@rollup/rollup-darwin-arm64": "4.59.0",
"@rollup/rollup-darwin-x64": "4.59.0",
"@rollup/rollup-freebsd-arm64": "4.59.0",
"@rollup/rollup-freebsd-x64": "4.59.0",
"@rollup/rollup-linux-arm-gnueabihf": "4.59.0",
"@rollup/rollup-linux-arm-musleabihf": "4.59.0",
"@rollup/rollup-linux-arm64-gnu": "4.59.0",
"@rollup/rollup-linux-arm64-musl": "4.59.0",
"@rollup/rollup-linux-loong64-gnu": "4.59.0",
"@rollup/rollup-linux-loong64-musl": "4.59.0",
"@rollup/rollup-linux-ppc64-gnu": "4.59.0",
"@rollup/rollup-linux-ppc64-musl": "4.59.0",
"@rollup/rollup-linux-riscv64-gnu": "4.59.0",
"@rollup/rollup-linux-riscv64-musl": "4.59.0",
"@rollup/rollup-linux-s390x-gnu": "4.59.0",
"@rollup/rollup-linux-x64-gnu": "4.59.0",
"@rollup/rollup-linux-x64-musl": "4.59.0",
"@rollup/rollup-openbsd-x64": "4.59.0",
"@rollup/rollup-openharmony-arm64": "4.59.0",
"@rollup/rollup-win32-arm64-msvc": "4.59.0",
"@rollup/rollup-win32-ia32-msvc": "4.59.0",
"@rollup/rollup-win32-x64-gnu": "4.59.0",
"@rollup/rollup-win32-x64-msvc": "4.59.0",
"fsevents": "~2.3.2"
}
},
"node_modules/source-map-js": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
"integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==",
"dev": true,
"license": "BSD-3-Clause",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/three": {
"version": "0.171.0",
"resolved": "https://registry.npmjs.org/three/-/three-0.171.0.tgz",
"integrity": "sha512-Y/lAXPaKZPcEdkKjh0JOAHVv8OOnv/NDJqm0wjfCzyQmfKxV7zvkwsnBgPBKTzJHToSOhRGQAGbPJObT59B/PQ==",
"license": "MIT"
},
"node_modules/vite": {
"version": "5.4.21",
"resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz",
"integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==",
"dev": true,
"license": "MIT",
"dependencies": {
"esbuild": "^0.21.3",
"postcss": "^8.4.43",
"rollup": "^4.20.0"
},
"bin": {
"vite": "bin/vite.js"
},
"engines": {
"node": "^18.0.0 || >=20.0.0"
},
"funding": {
"url": "https://github.com/vitejs/vite?sponsor=1"
},
"optionalDependencies": {
"fsevents": "~2.3.3"
},
"peerDependencies": {
"@types/node": "^18.0.0 || >=20.0.0",
"less": "*",
"lightningcss": "^1.21.0",
"sass": "*",
"sass-embedded": "*",
"stylus": "*",
"sugarss": "*",
"terser": "^5.4.0"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
},
"less": {
"optional": true
},
"lightningcss": {
"optional": true
},
"sass": {
"optional": true
},
"sass-embedded": {
"optional": true
},
"stylus": {
"optional": true
},
"sugarss": {
"optional": true
},
"terser": {
"optional": true
}
}
}
}
}

17
the-matrix/package.json Normal file
View File

@@ -0,0 +1,17 @@
{
"name": "the-matrix",
"version": "0.1.0",
"private": true,
"type": "module",
"scripts": {
"dev": "vite",
"build": "vite build",
"preview": "vite preview"
},
"dependencies": {
"three": "0.171.0"
},
"devDependencies": {
"vite": "^5.4.0"
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 66 KiB

View File

@@ -0,0 +1,24 @@
{
"name": "The Matrix",
"short_name": "The Matrix",
"description": "Timmy Tower World — live agent network visualization",
"start_url": "/",
"display": "standalone",
"orientation": "landscape",
"background_color": "#000000",
"theme_color": "#00ff41",
"icons": [
{
"src": "/icons/icon-192.png",
"sizes": "192x192",
"type": "image/png",
"purpose": "any maskable"
},
{
"src": "/icons/icon-512.png",
"sizes": "512x512",
"type": "image/png",
"purpose": "any maskable"
}
]
}

39
the-matrix/sw.js Normal file
View File

@@ -0,0 +1,39 @@
/* sw.js — Matrix PWA service worker
 * PRECACHE_URLS is replaced at build time by the generate-sw Vite plugin.
 * Registration is gated to import.meta.env.PROD in main.js, so this template
 * file is never evaluated by browsers during development.
 */
// Bump the version suffix to invalidate previously cached assets; the
// activate handler deletes every cache whose name no longer matches.
const CACHE_NAME = 'timmy-matrix-v1';
// Build-time placeholder — replaced with a JSON array of URLs by the
// generate-sw plugin in vite.config.js. Not valid JS until substituted.
const PRECACHE_URLS = __PRECACHE_URLS__;
// Install: precache the app shell, then activate immediately without
// waiting for existing tabs to close (skipWaiting).
self.addEventListener('install', event => {
  const precacheShell = async () => {
    const cache = await caches.open(CACHE_NAME);
    await cache.addAll(PRECACHE_URLS);
  };
  event.waitUntil(precacheShell());
  self.skipWaiting();
});
// Activate: drop caches from older SW versions, then take control of all
// open clients right away (clients.claim).
self.addEventListener('activate', event => {
  const removeStaleCaches = async () => {
    const keys = await caches.keys();
    const stale = keys.filter(key => key !== CACHE_NAME);
    await Promise.all(stale.map(key => caches.delete(key)));
  };
  event.waitUntil(removeStaleCaches());
  self.clients.claim();
});
// Fetch: cache-first for GET requests — serve from cache when available,
// otherwise hit the network and cache successful same-origin responses.
self.addEventListener('fetch', event => {
  if (event.request.method !== 'GET') return;
  event.respondWith(
    caches.match(event.request).then(cached => {
      if (cached) return cached;
      return fetch(event.request).then(response => {
        // Only cache complete, same-origin (type 'basic') 200 responses;
        // pass errors and opaque/cross-origin responses straight through.
        if (!response || response.status !== 200 || response.type !== 'basic') {
          return response;
        }
        // BUGFIX: clone synchronously, BEFORE handing the response to the
        // page. A Response body is single-use; the previous code called
        // response.clone() inside the async caches.open() callback, by which
        // time the body may already be consumed — clone() then throws and
        // the asset is silently never cached.
        const copy = response.clone();
        caches.open(CACHE_NAME).then(cache => cache.put(event.request, copy));
        return response;
      });
    })
  );
});

46
the-matrix/vite.config.js Normal file
View File

@@ -0,0 +1,46 @@
import { defineConfig } from 'vite';
import { readFileSync, writeFileSync } from 'fs';
/**
 * Vite plugin: after the production bundle is written, render dist/sw.js
 * from the sw.js template by replacing the __PRECACHE_URLS__ placeholder
 * with the concrete list of URLs to precache (static shell + every hashed
 * bundle file and its CSS, read from the Vite build manifest).
 */
function generateSW() {
  return {
    name: 'generate-sw',
    apply: 'build', // dev never registers the SW, so skip this plugin there
    closeBundle() {
      // Always precached, regardless of what the bundle manifest contains.
      const staticAssets = [
        '/',
        '/manifest.json',
        '/icons/icon-192.png',
        '/icons/icon-512.png',
      ];
      try {
        const manifest = JSON.parse(readFileSync('dist/.vite/manifest.json', 'utf-8'));
        for (const entry of Object.values(manifest)) {
          staticAssets.push('/' + entry.file);
          if (entry.css) entry.css.forEach(f => staticAssets.push('/' + f));
        }
      } catch (err) {
        // Best-effort: a missing/unreadable manifest must not fail the build,
        // but never swallow it silently — without the manifest the SW would
        // precache only the static shell and nobody would know why.
        console.warn(
          '[generate-sw] could not read dist/.vite/manifest.json; precaching static assets only:',
          err.message
        );
      }
      const template = readFileSync('sw.js', 'utf-8');
      const out = template.replace('__PRECACHE_URLS__', JSON.stringify(staticAssets, null, 4));
      writeFileSync('dist/sw.js', out);
      console.log('[generate-sw] wrote dist/sw.js with', staticAssets.length, 'precache URLs');
    },
  };
}
// Vite configuration for the Matrix PWA build.
export default defineConfig({
  root: '.',
  build: {
    outDir: 'dist',
    assetsDir: 'assets',
    target: 'esnext',
    // Emit dist/.vite/manifest.json — required by the generate-sw plugin
    // above to enumerate hashed bundle files for the SW precache list.
    manifest: true,
  },
  plugins: [generateSW()],
  server: {
    // Listen on all interfaces so the dev server is reachable over LAN/tunnel.
    host: true,
  },
});

View File

@@ -6,9 +6,6 @@
{
"path": "./lib/db"
},
{
"path": "./lib/api-client-react"
},
{
"path": "./lib/api-zod"
},