Task #2: MVP Foundation — injectable services, DB schema, smoke test
DB schema
- jobs and invoices tables added to lib/db/src/schema/
- schema barrel updated (jobs, invoices, conversations, messages)
- pnpm --filter @workspace/db run push applied successfully
LNbitsService (artifacts/api-server/src/lib/lnbits.ts)
- Injectable class accepting optional { url, apiKey } config
- Falls back to LNBITS_URL / LNBITS_API_KEY env vars
- Auto-detects stub mode when credentials are absent; logs warning
- createInvoice() -> { paymentHash, paymentRequest }
- checkInvoicePaid() -> boolean
- stubMarkPaid() helper for dev/test flows
- Real LNbits REST v1 calls wired behind the stub guard
AgentService (artifacts/api-server/src/lib/agent.ts)
- Injectable class with configurable evalModel / workModel
- evaluateRequest(text) -> { accepted: boolean, reason: string }
uses claude-haiku-4-5; strips markdown fences before JSON parse
- executeWork(text) -> { result: string } uses claude-sonnet-4-6
- Wired via Replit Anthropic AI Integration (no user API key)
PricingService (artifacts/api-server/src/lib/pricing.ts)
- Injectable class with configurable fee/bucket thresholds
- calculateEvalFeeSats() -> 10 sats (fixed)
- calculateWorkFeeSats(text) -> 50/100/250 by char-length bucket
- Zero LLM involvement; fully deterministic
Smoke test (scripts/src/smoke.ts)
- pnpm --filter @workspace/scripts run smoke
- Verifies LNbits stub: create, check unpaid, mark paid, check paid
- Verifies Anthropic: evaluateRequest round-trip
- Both checks passed
replit.md
- Documented required (LNBITS_URL, LNBITS_API_KEY) and auto-provisioned secrets
- Stub-mode behaviour explained
This commit is contained in:
@@ -1,71 +1,72 @@
|
||||
import { anthropic } from "@workspace/integrations-anthropic-ai";
|
||||
|
||||
const EVAL_MODEL = "claude-haiku-4-5";
|
||||
const WORK_MODEL = "claude-sonnet-4-6";
|
||||
import type Anthropic from "@anthropic-ai/sdk";
|
||||
|
||||
export interface EvalResult {
|
||||
approved: boolean;
|
||||
accepted: boolean;
|
||||
reason: string;
|
||||
}
|
||||
|
||||
export async function evaluateRequest(request: string): Promise<EvalResult> {
|
||||
const message = await anthropic.messages.create({
|
||||
model: EVAL_MODEL,
|
||||
max_tokens: 8192,
|
||||
system: `You are Timmy, an AI agent gatekeeper. Your job is to evaluate user requests.
|
||||
A request should be APPROVED if it is:
|
||||
- Clear and specific enough to act on
|
||||
- Ethical, lawful, and not harmful
|
||||
- Within the capabilities of a general-purpose AI assistant
|
||||
/** Output of AgentService.executeWork: the work model's full text response. */
export interface WorkResult {
  result: string;
}
|
||||
|
||||
A request should be REJECTED if it is:
|
||||
- Harmful, illegal, or unethical
|
||||
- Completely incoherent or impossible to act on
|
||||
- Spam or an attempt to abuse the system
|
||||
/** Optional model overrides for AgentService. */
export interface AgentConfig {
  // Model used for request gatekeeping (defaults to "claude-haiku-4-5").
  evalModel?: string;
  // Model used for paid work execution (defaults to "claude-sonnet-4-6").
  workModel?: string;
}
|
||||
|
||||
Respond ONLY with valid JSON in this exact format:
|
||||
{"approved": true, "reason": "Brief explanation"}
|
||||
or
|
||||
{"approved": false, "reason": "Brief explanation of why it was rejected"}`,
|
||||
messages: [
|
||||
{
|
||||
role: "user",
|
||||
content: `Evaluate this request: ${request}`,
|
||||
},
|
||||
],
|
||||
});
|
||||
export class AgentService {
|
||||
private readonly evalModel: string;
|
||||
private readonly workModel: string;
|
||||
|
||||
const block = message.content[0];
|
||||
if (block.type !== "text") {
|
||||
throw new Error("Unexpected response type from eval model");
|
||||
constructor(config?: AgentConfig) {
|
||||
this.evalModel = config?.evalModel ?? "claude-haiku-4-5";
|
||||
this.workModel = config?.workModel ?? "claude-sonnet-4-6";
|
||||
}
|
||||
|
||||
try {
|
||||
const parsed = JSON.parse(block.text) as { approved: boolean; reason: string };
|
||||
return { approved: Boolean(parsed.approved), reason: parsed.reason ?? "" };
|
||||
} catch {
|
||||
throw new Error(`Failed to parse eval response: ${block.text}`);
|
||||
async evaluateRequest(requestText: string): Promise<EvalResult> {
|
||||
const message = await anthropic.messages.create({
|
||||
model: this.evalModel,
|
||||
max_tokens: 8192,
|
||||
system: `You are Timmy, an AI agent gatekeeper. Evaluate whether a request is acceptable to act on.
|
||||
ACCEPT if the request is: clear enough to act on, ethical, lawful, and within the capability of a general-purpose AI.
|
||||
REJECT if the request is: harmful, illegal, unethical, incoherent, or spam.
|
||||
Respond ONLY with valid JSON: {"accepted": true, "reason": "..."} or {"accepted": false, "reason": "..."}`,
|
||||
messages: [{ role: "user", content: `Evaluate this request: ${requestText}` }],
|
||||
} as Parameters<typeof anthropic.messages.create>[0]);
|
||||
|
||||
const block = message.content[0] as Anthropic.TextBlock;
|
||||
if (block.type !== "text") {
|
||||
throw new Error("Unexpected non-text response from eval model");
|
||||
}
|
||||
|
||||
let parsed: { accepted: boolean; reason: string };
|
||||
try {
|
||||
const raw = block.text.replace(/^```(?:json)?\s*/i, "").replace(/\s*```$/, "").trim();
|
||||
parsed = JSON.parse(raw) as { accepted: boolean; reason: string };
|
||||
} catch {
|
||||
throw new Error(`Failed to parse eval JSON: ${block.text}`);
|
||||
}
|
||||
|
||||
return { accepted: Boolean(parsed.accepted), reason: parsed.reason ?? "" };
|
||||
}
|
||||
|
||||
async executeWork(requestText: string): Promise<WorkResult> {
|
||||
const message = await anthropic.messages.create({
|
||||
model: this.workModel,
|
||||
max_tokens: 8192,
|
||||
system: `You are Timmy, a capable AI agent. A user has paid for you to handle their request.
|
||||
Fulfill it thoroughly and helpfully. Be concise yet complete.`,
|
||||
messages: [{ role: "user", content: requestText }],
|
||||
} as Parameters<typeof anthropic.messages.create>[0]);
|
||||
|
||||
const block = message.content[0] as Anthropic.TextBlock;
|
||||
if (block.type !== "text") {
|
||||
throw new Error("Unexpected non-text response from work model");
|
||||
}
|
||||
|
||||
return { result: block.text };
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeRequest(request: string): Promise<string> {
|
||||
const message = await anthropic.messages.create({
|
||||
model: WORK_MODEL,
|
||||
max_tokens: 8192,
|
||||
system: `You are Timmy, a capable AI agent. A user has paid for you to handle their request.
|
||||
Do your best to fulfill it thoroughly and helpfully. Be concise yet complete.`,
|
||||
messages: [
|
||||
{
|
||||
role: "user",
|
||||
content: request,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const block = message.content[0];
|
||||
if (block.type !== "text") {
|
||||
throw new Error("Unexpected response type from work model");
|
||||
}
|
||||
|
||||
return block.text;
|
||||
}
|
||||
// Shared default instance using the default eval/work models.
export const agentService = new AgentService();
|
||||
|
||||
@@ -5,33 +5,87 @@ export interface LNbitsInvoice {
|
||||
paymentRequest: string;
|
||||
}
|
||||
|
||||
export interface LNbitsInvoiceStatus {
|
||||
paid: boolean;
|
||||
paidAt?: Date;
|
||||
export interface LNbitsConfig {
|
||||
url: string;
|
||||
apiKey: string;
|
||||
}
|
||||
|
||||
const paidInvoices = new Set<string>();
|
||||
const stubPaidInvoices = new Set<string>();
|
||||
|
||||
export async function createInvoice(
|
||||
amountSats: number,
|
||||
memo: string,
|
||||
): Promise<LNbitsInvoice> {
|
||||
const paymentHash = randomBytes(32).toString("hex");
|
||||
const paymentRequest = `lnbcrt${amountSats}u1stub_${paymentHash.slice(0, 16)}`;
|
||||
console.log(`[stub] Created invoice: ${amountSats} sats — "${memo}" — hash=${paymentHash}`);
|
||||
return { paymentHash, paymentRequest };
|
||||
}
|
||||
export class LNbitsService {
|
||||
private readonly url: string;
|
||||
private readonly apiKey: string;
|
||||
readonly stubMode: boolean;
|
||||
|
||||
export async function checkInvoicePaid(
|
||||
paymentHash: string,
|
||||
): Promise<LNbitsInvoiceStatus> {
|
||||
if (paidInvoices.has(paymentHash)) {
|
||||
return { paid: true, paidAt: new Date() };
|
||||
constructor(config?: Partial<LNbitsConfig>) {
|
||||
this.url = config?.url ?? process.env.LNBITS_URL ?? "";
|
||||
this.apiKey = config?.apiKey ?? process.env.LNBITS_API_KEY ?? "";
|
||||
this.stubMode = !this.url || !this.apiKey;
|
||||
if (this.stubMode) {
|
||||
console.warn("[LNbitsService] No LNBITS_URL/LNBITS_API_KEY — running in STUB mode. Invoices are simulated.");
|
||||
}
|
||||
}
|
||||
|
||||
async createInvoice(amountSats: number, memo: string): Promise<LNbitsInvoice> {
|
||||
if (this.stubMode) {
|
||||
const paymentHash = randomBytes(32).toString("hex");
|
||||
const paymentRequest = `lnbcrt${amountSats}u1stub_${paymentHash.slice(0, 16)}`;
|
||||
console.log(`[stub] Created invoice: ${amountSats} sats — "${memo}" — hash=${paymentHash}`);
|
||||
return { paymentHash, paymentRequest };
|
||||
}
|
||||
|
||||
const response = await fetch(`${this.url.replace(/\/$/, "")}/api/v1/payments`, {
|
||||
method: "POST",
|
||||
headers: this.headers(),
|
||||
body: JSON.stringify({ out: false, amount: amountSats, memo }),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const body = await response.text();
|
||||
throw new Error(`LNbits createInvoice failed (${response.status}): ${body}`);
|
||||
}
|
||||
|
||||
const data = (await response.json()) as {
|
||||
payment_hash: string;
|
||||
payment_request: string;
|
||||
};
|
||||
return { paymentHash: data.payment_hash, paymentRequest: data.payment_request };
|
||||
}
|
||||
|
||||
async checkInvoicePaid(paymentHash: string): Promise<boolean> {
|
||||
if (this.stubMode) {
|
||||
return stubPaidInvoices.has(paymentHash);
|
||||
}
|
||||
|
||||
const response = await fetch(
|
||||
`${this.url.replace(/\/$/, "")}/api/v1/payments/${paymentHash}`,
|
||||
{ method: "GET", headers: this.headers() },
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
const body = await response.text();
|
||||
throw new Error(`LNbits checkInvoice failed (${response.status}): ${body}`);
|
||||
}
|
||||
|
||||
const data = (await response.json()) as { paid: boolean };
|
||||
return data.paid;
|
||||
}
|
||||
|
||||
/** Stub-only helper: mark an invoice as paid for testing/dev flows. */
|
||||
stubMarkPaid(paymentHash: string): void {
|
||||
if (!this.stubMode) {
|
||||
throw new Error("stubMarkPaid called on a real LNbitsService instance");
|
||||
}
|
||||
stubPaidInvoices.add(paymentHash);
|
||||
console.log(`[stub] Marked invoice paid: hash=${paymentHash}`);
|
||||
}
|
||||
|
||||
private headers(): Record<string, string> {
|
||||
return {
|
||||
"Content-Type": "application/json",
|
||||
"X-Api-Key": this.apiKey,
|
||||
};
|
||||
}
|
||||
return { paid: false };
|
||||
}
|
||||
|
||||
export function markInvoicePaid(paymentHash: string): void {
|
||||
paidInvoices.add(paymentHash);
|
||||
console.log(`[stub] Marked invoice paid: hash=${paymentHash}`);
|
||||
}
|
||||
// Shared default instance; runs in stub mode automatically when the
// LNBITS_URL / LNBITS_API_KEY env vars are absent.
export const lnbitsService = new LNbitsService();
|
||||
|
||||
@@ -1,8 +1,39 @@
|
||||
export const EVAL_FEE_SATS = 10;
|
||||
|
||||
export function computeWorkFeeSats(request: string): number {
|
||||
const len = request.trim().length;
|
||||
if (len <= 100) return 50;
|
||||
if (len <= 300) return 100;
|
||||
return 250;
|
||||
export interface PricingConfig {
|
||||
evalFeeSats?: number;
|
||||
workFeeShortSats?: number;
|
||||
workFeeMediumSats?: number;
|
||||
workFeeLongSats?: number;
|
||||
shortMaxChars?: number;
|
||||
mediumMaxChars?: number;
|
||||
}
|
||||
|
||||
export class PricingService {
|
||||
private readonly evalFee: number;
|
||||
private readonly workFeeShort: number;
|
||||
private readonly workFeeMedium: number;
|
||||
private readonly workFeeLong: number;
|
||||
private readonly shortMax: number;
|
||||
private readonly mediumMax: number;
|
||||
|
||||
constructor(config?: PricingConfig) {
|
||||
this.evalFee = config?.evalFeeSats ?? 10;
|
||||
this.workFeeShort = config?.workFeeShortSats ?? 50;
|
||||
this.workFeeMedium = config?.workFeeMediumSats ?? 100;
|
||||
this.workFeeLong = config?.workFeeLongSats ?? 250;
|
||||
this.shortMax = config?.shortMaxChars ?? 100;
|
||||
this.mediumMax = config?.mediumMaxChars ?? 300;
|
||||
}
|
||||
|
||||
calculateEvalFeeSats(): number {
|
||||
return this.evalFee;
|
||||
}
|
||||
|
||||
calculateWorkFeeSats(requestText: string): number {
|
||||
const len = requestText.trim().length;
|
||||
if (len <= this.shortMax) return this.workFeeShort;
|
||||
if (len <= this.mediumMax) return this.workFeeMedium;
|
||||
return this.workFeeLong;
|
||||
}
|
||||
}
|
||||
|
||||
export const pricingService = new PricingService();
|
||||
|
||||
4
pnpm-lock.yaml
generated
4
pnpm-lock.yaml
generated
@@ -443,6 +443,10 @@ importers:
|
||||
version: 7.1.1
|
||||
|
||||
scripts:
|
||||
dependencies:
|
||||
'@workspace/integrations-anthropic-ai':
|
||||
specifier: workspace:*
|
||||
version: link:../lib/integrations-anthropic-ai
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: 'catalog:'
|
||||
|
||||
20
replit.md
20
replit.md
@@ -48,6 +48,26 @@ Every package extends `tsconfig.base.json` which sets `composite: true`. The roo
|
||||
- `pnpm run build` — runs `typecheck` first, then recursively runs `build` in all packages that define it
|
||||
- `pnpm run typecheck` — runs `tsc --build --emitDeclarationOnly` using project references
|
||||
|
||||
## Environment Variables & Secrets
|
||||
|
||||
### Automatically provisioned (do not set manually)
|
||||
|
||||
| Secret | Purpose |
|
||||
|---|---|
|
||||
| `AI_INTEGRATIONS_ANTHROPIC_BASE_URL` | Replit AI Integrations proxy base URL for Anthropic |
|
||||
| `AI_INTEGRATIONS_ANTHROPIC_API_KEY` | Replit AI Integrations proxy API key (dummy value, auto-managed) |
|
||||
| `DATABASE_URL` | PostgreSQL connection string (Replit-managed) |
|
||||
| `SESSION_SECRET` | Express session secret (Replit-managed) |
|
||||
|
||||
### Required secrets (set via Replit Secrets tab)
|
||||
|
||||
| Secret | Description | Example |
|
||||
|---|---|---|
|
||||
| `LNBITS_URL` | Base URL of your LNbits instance | `https://legend.lnbits.com` |
|
||||
| `LNBITS_API_KEY` | Invoice/Admin API key from your LNbits wallet | `a3f...` |
|
||||
|
||||
> **Note:** If `LNBITS_URL` and `LNBITS_API_KEY` are absent, `LNbitsService` automatically runs in **stub mode** — invoices are simulated in-memory and can be marked paid via `svc.stubMarkPaid(hash)`. This is intentional for development without a Lightning node.
|
||||
|
||||
## Packages
|
||||
|
||||
### `artifacts/api-server` (`@workspace/api-server`)
|
||||
|
||||
@@ -5,8 +5,12 @@
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"hello": "tsx ./src/hello.ts",
|
||||
"smoke": "tsx ./src/smoke.ts",
|
||||
"typecheck": "tsc -p tsconfig.json --noEmit"
|
||||
},
|
||||
"dependencies": {
|
||||
"@workspace/integrations-anthropic-ai": "workspace:*"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "catalog:",
|
||||
"tsx": "catalog:"
|
||||
|
||||
46
scripts/src/smoke.ts
Normal file
46
scripts/src/smoke.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
/**
|
||||
* Smoke test: confirms LNbitsService and AgentService are reachable.
|
||||
* Run: pnpm --filter @workspace/scripts run smoke
|
||||
*/
|
||||
import { LNbitsService } from "../../artifacts/api-server/src/lib/lnbits.ts";
|
||||
import { AgentService } from "../../artifacts/api-server/src/lib/agent.ts";
|
||||
|
||||
async function smokeLnbits(): Promise<void> {
|
||||
const svc = new LNbitsService();
|
||||
const invoice = await svc.createInvoice(1, "smoke-test");
|
||||
console.log("✓ LNbits createInvoice:", invoice.paymentHash.slice(0, 16), "...");
|
||||
|
||||
const paidBefore = await svc.checkInvoicePaid(invoice.paymentHash);
|
||||
console.log("✓ LNbits checkInvoicePaid (unpaid, expect false):", paidBefore);
|
||||
|
||||
if (svc.stubMode) {
|
||||
svc.stubMarkPaid(invoice.paymentHash);
|
||||
const paidAfter = await svc.checkInvoicePaid(invoice.paymentHash);
|
||||
console.log("✓ LNbits checkInvoicePaid (after stub mark, expect true):", paidAfter);
|
||||
}
|
||||
}
|
||||
|
||||
async function smokeAnthropic(): Promise<void> {
|
||||
const svc = new AgentService();
|
||||
const evalResult = await svc.evaluateRequest("What is 2 + 2?");
|
||||
console.log("✓ AgentService evaluateRequest:", JSON.stringify(evalResult));
|
||||
}
|
||||
|
||||
(async () => {
|
||||
console.log("--- Timmy smoke test ---");
|
||||
try {
|
||||
await smokeLnbits();
|
||||
} catch (err) {
|
||||
console.error("✗ LNbits smoke failed:", err);
|
||||
process.exitCode = 1;
|
||||
}
|
||||
try {
|
||||
await smokeAnthropic();
|
||||
} catch (err) {
|
||||
console.error("✗ Anthropic smoke failed:", err);
|
||||
process.exitCode = 1;
|
||||
}
|
||||
if (!process.exitCode) {
|
||||
console.log("--- All smoke checks passed ---");
|
||||
}
|
||||
})();
|
||||
@@ -5,5 +5,8 @@
|
||||
"rootDir": "src",
|
||||
"types": ["node"]
|
||||
},
|
||||
"include": ["src"]
|
||||
"include": ["src"],
|
||||
"references": [
|
||||
{ "path": "../lib/integrations-anthropic-ai" }
|
||||
]
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user