Compare commits
6 Commits
step35/498
...
fix/987
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2485b7a708 | ||
|
|
84831942ed | ||
| d1f5d34fd4 | |||
| 891cdb6e94 | |||
| cac5ca630d | |||
|
|
f1c9843376 |
48
luna/README.md
Normal file
48
luna/README.md
Normal file
@@ -0,0 +1,48 @@
|
||||
# LUNA-1: Pink Unicorn Game — Project Scaffolding
|
||||
|
||||
Starter project for Mackenzie's Pink Unicorn Game built with **p5.js 1.9.0**.
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
cd luna
|
||||
python3 -m http.server 8080
|
||||
# Visit http://localhost:8080
|
||||
```
|
||||
|
||||
Or simply open `luna/index.html` directly in a browser.
|
||||
|
||||
## Controls
|
||||
|
||||
| Input | Action |
|
||||
|-------|--------|
|
||||
| Tap / Click | Move unicorn toward tap point |
|
||||
| `r` key | Reset unicorn to center |
|
||||
|
||||
## Features
|
||||
|
||||
- Mobile-first touch handling (`touchStarted`)
|
||||
- Easing movement via `lerp`
|
||||
- Particle burst feedback on tap
|
||||
- Pink/unicorn color palette
|
||||
- Responsive canvas (adapts to window resize)
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
luna/
|
||||
├── index.html # p5.js CDN import + canvas container
|
||||
├── sketch.js # Main game logic and rendering
|
||||
├── style.css # Pink/unicorn theme, responsive layout
|
||||
└── README.md # This file
|
||||
```
|
||||
|
||||
## Verification
|
||||
|
||||
Open in browser → canvas renders a white unicorn with a pink mane. Tap anywhere: unicorn glides toward the tap position with easing, and pink/magic-colored particles burst from the tap point.
|
||||
|
||||
## Technical Notes
|
||||
|
||||
- p5.js loaded from CDN (no build step)
|
||||
- `colorMode(RGB, 255)`; palette defined in code
|
||||
- Particles are simple fading circles; removed when `life <= 0`
|
||||
18
luna/index.html
Normal file
18
luna/index.html
Normal file
@@ -0,0 +1,18 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>LUNA-3: Simple World — Floating Islands</title>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/1.9.0/p5.min.js"></script>
|
||||
<link rel="stylesheet" href="style.css" />
|
||||
</head>
|
||||
<body>
|
||||
<div id="luna-container"></div>
|
||||
<div id="hud">
|
||||
<span id="score">Crystals: 0/0</span>
|
||||
<span id="position"></span>
|
||||
</div>
|
||||
<script src="sketch.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
289
luna/sketch.js
Normal file
289
luna/sketch.js
Normal file
@@ -0,0 +1,289 @@
|
||||
/**
|
||||
* LUNA-3: Simple World — Floating Islands & Collectible Crystals
|
||||
* Builds on LUNA-1 scaffold (unicorn tap-follow) + LUNA-2 actions
|
||||
*
|
||||
* NEW: Floating platforms + collectible crystals with particle bursts
|
||||
*/
|
||||
|
||||
// ── Module state ─────────────────────────────────────────────────────────
let particles = [];     // active particle effects (bursts, pulses, tap hints)
let unicornX, unicornY; // current unicorn position
let targetX, targetY;   // position the unicorn eases toward

// Platforms: floating islands at various heights with horizontal ranges
const islands = [
  { x: 100, y: 350, w: 150, h: 20, color: [100, 200, 150] }, // left island
  { x: 350, y: 280, w: 120, h: 20, color: [120, 180, 200] }, // middle-high island
  { x: 550, y: 320, w: 140, h: 20, color: [200, 180, 100] }, // right island
  { x: 200, y: 180, w: 180, h: 20, color: [180, 140, 200] }, // top-left island
  { x: 500, y: 120, w: 100, h: 20, color: [140, 220, 180] }, // top-right island
];

// Collectible crystals on islands.
// NOTE: this runs at script-load time, BEFORE p5 attaches its global helpers
// (random, floor) in global mode, so Math.random/Math.floor must be used
// here — calling p5's random() at module scope throws a ReferenceError.
const crystals = [];
islands.forEach((island, i) => {
  // 2–3 crystals per island, placed near the island's center
  const count = 2 + Math.floor(Math.random() * 2);
  for (let j = 0; j < count; j++) {
    crystals.push({
      x: island.x + 30 + Math.random() * (island.w - 60), // keep off the edges
      y: island.y - 30 - Math.random() * 20,              // hover above surface
      size: 8 + Math.random() * 6,
      hue: 280 + Math.random() * 60, // pink/purple range (280–340)
      collected: false,
      islandIndex: i,
    });
  }
});

let collectedCount = 0;
const TOTAL_CRYSTALS = crystals.length;

// Pink/unicorn palette
const PALETTE = {
  background: [255, 210, 230], // light pink (overridden by gradient in draw)
  unicorn: [255, 182, 193],    // pale pink/white
  horn: [255, 215, 0],         // gold
  mane: [255, 105, 180],       // hot pink
  eye: [255, 20, 147],         // deep pink
  sparkle: [255, 105, 180],
  island: [100, 200, 150],
};
|
||||
|
||||
/**
 * p5 entry point: create the canvas inside #luna-container and place the
 * unicorn (and its easing target) at its starting position.
 */
function setup() {
  // canvas.parent() accepts the element id directly, so the previous
  // unused getElementById lookup has been removed.
  const canvas = createCanvas(600, 500);
  canvas.parent('luna-container');
  unicornX = width / 2;
  unicornY = height - 60; // start on ground (bottom platform equivalent)
  targetX = unicornX;
  targetY = unicornY;
  noStroke();
  addTapHint(); // pre-spawn drifting hint particles
}
|
||||
|
||||
/**
 * Per-frame render + game loop. Order matters: sky, islands, crystals,
 * then unicorn movement/drawing, collection checks, particles, HUD.
 * Note: stroke()/fill() state set in one phase carries into the next
 * unless explicitly reset, so phases must not be reordered casually.
 */
function draw() {
  // Gradient sky background — one horizontal line per pixel row.
  for (let y = 0; y < height; y++) {
    const t = y / height;
    const r = lerp(26, 15, t); // #1a1a2e → #0f3460
    const g = lerp(26, 52, t);
    const b = lerp(46, 96, t);
    stroke(r, g, b);
    line(0, y, width, y);
  }

  // Draw islands (floating platforms with subtle shadow)
  islands.forEach(island => {
    push();
    // Shadow — offset down-right, semi-transparent black
    fill(0, 0, 0, 40);
    ellipse(island.x + island.w/2 + 5, island.y + 5, island.w + 10, island.h + 6);
    // Island body
    fill(island.color[0], island.color[1], island.color[2]);
    ellipse(island.x + island.w/2, island.y, island.w, island.h);
    // Top highlight
    fill(255, 255, 255, 60);
    ellipse(island.x + island.w/2, island.y - island.h/3, island.w * 0.6, island.h * 0.3);
    pop();
  });

  // Draw crystals (glowing collectibles); skip those already collected
  crystals.forEach(c => {
    if (c.collected) return;
    push();
    translate(c.x, c.y);
    // Glow aura — translucent halo behind the crystal
    const glow = color(`hsla(${c.hue}, 80%, 70%, 0.4)`);
    noStroke();
    fill(glow);
    ellipse(0, 0, c.size * 2.2, c.size * 2.2);
    // Crystal body (diamond shape)
    const ccol = color(`hsl(${c.hue}, 90%, 75%)`);
    fill(ccol);
    beginShape();
    vertex(0, -c.size);
    vertex(c.size * 0.6, 0);
    vertex(0, c.size);
    vertex(-c.size * 0.6, 0);
    endShape(CLOSE);
    // Inner sparkle
    fill(255, 255, 255, 180);
    ellipse(0, 0, c.size * 0.5, c.size * 0.5);
    pop();
  });

  // Unicorn smooth movement towards target (8% of remaining gap per frame)
  unicornX = lerp(unicornX, targetX, 0.08);
  unicornY = lerp(unicornY, targetY, 0.08);

  // Constrain unicorn to screen bounds (40px margin)
  unicornX = constrain(unicornX, 40, width - 40);
  unicornY = constrain(unicornY, 40, height - 40);

  // Draw sparkles (only emitted while the unicorn is still moving)
  drawSparkles();

  // Draw the unicorn
  drawUnicorn(unicornX, unicornY);

  // Collection detection — pick up any uncollected crystal within 35px
  for (let c of crystals) {
    if (c.collected) continue;
    const d = dist(unicornX, unicornY, c.x, c.y);
    if (d < 35) {
      c.collected = true;
      collectedCount++;
      createCollectionBurst(c.x, c.y, c.hue);
    }
  }

  // Update particles (move, age, cull, and render)
  updateParticles();

  // Update HUD spans defined in index.html
  document.getElementById('score').textContent = `Crystals: ${collectedCount}/${TOTAL_CRYSTALS}`;
  document.getElementById('position').textContent = `(${floor(unicornX)}, ${floor(unicornY)})`;
}
|
||||
|
||||
/**
 * Render the unicorn sprite centered at (px, py).
 * Draw order matters: body → head → mane → horn → eye → legs.
 */
function drawUnicorn(px, py) {
  push();
  translate(px, py);

  // Body and head share the pale-pink fill.
  noStroke();
  fill(PALETTE.unicorn);
  ellipse(0, 0, 60, 40);    // body
  ellipse(30, -20, 30, 25); // head

  // Flowing mane: five hot-pink tufts across the back.
  fill(PALETTE.mane);
  for (let tuft = 0; tuft < 5; tuft++) {
    ellipse(-10 + tuft * 12, -50, 12, 25);
  }

  // Golden horn, tilted off the forehead.
  push();
  translate(30, -35);
  rotate(-PI / 6);
  fill(PALETTE.horn);
  triangle(0, 0, -8, -35, 8, -35);
  pop();

  // Eye
  fill(PALETTE.eye);
  ellipse(38, -22, 8, 8);

  // Legs: two thick strokes, slightly darker than the body tone.
  stroke(PALETTE.unicorn[0] - 40);
  strokeWeight(6);
  line(-20, 20, -20, 45);
  line(20, 20, 20, 45);

  pop();
}
|
||||
|
||||
/**
 * Emit a few transient sparkle points around the unicorn while it is
 * still easing toward its target; does nothing once it has settled.
 */
function drawSparkles() {
  const stillMovingX = abs(targetX - unicornX) > 1;
  const stillMovingY = abs(targetY - unicornY) > 1;
  if (!stillMovingX && !stillMovingY) return;

  for (let n = 0; n < 3; n++) {
    const theta = random(TWO_PI);
    const radius = random(20, 50);
    const sx = unicornX + cos(theta) * radius;
    const sy = unicornY + sin(theta) * radius;
    stroke(PALETTE.sparkle[0], PALETTE.sparkle[1], PALETTE.sparkle[2], 150);
    strokeWeight(2);
    point(sx, sy);
  }
}
|
||||
|
||||
/**
 * Spawn the pickup effect for a collected crystal: 20 hue-tinted
 * particles flying outward plus a 12-particle golden sparkle ring.
 * @param {number} x    burst origin x
 * @param {number} y    burst origin y
 * @param {number} hue  base hue of the collected crystal
 */
function createCollectionBurst(x, y, hue) {
  // Main burst: random directions/speeds, hue jittered around the crystal's.
  for (let k = 0; k < 20; k++) {
    const dir = random(TWO_PI);
    const speed = random(2, 6);
    particles.push({
      x: x,
      y: y,
      vx: cos(dir) * speed,
      vy: sin(dir) * speed,
      life: 60,
      color: `hsl(${hue + random(-20, 20)}, 90%, 70%)`,
      size: random(3, 6),
    });
  }

  // Bonus sparkle ring: fixed speed, golden, shorter lifetime.
  for (let k = 0; k < 12; k++) {
    const dir = random(TWO_PI);
    particles.push({
      x: x,
      y: y,
      vx: cos(dir) * 4,
      vy: sin(dir) * 4,
      life: 40,
      color: 'rgba(255, 215, 0, 0.9)',
      size: 4,
    });
  }
}
|
||||
|
||||
/**
 * Advance, cull, and render every particle. Iterates backwards so that
 * splice() during the walk stays index-safe; particles whose life has
 * expired are removed instead of drawn.
 */
function updateParticles() {
  for (let idx = particles.length - 1; idx >= 0; idx--) {
    const p = particles[idx];

    // Integrate motion: velocity, light gravity, then drag.
    p.x += p.vx;
    p.y += p.vy;
    p.vy += 0.1; // gravity
    p.life--;
    p.vx *= 0.95;
    p.vy *= 0.95;

    if (p.life <= 0) {
      particles.splice(idx, 1);
    } else {
      push();
      stroke(p.color);
      strokeWeight(p.size);
      point(p.x, p.y);
      pop();
    }
  }
}
|
||||
|
||||
// Tap/click handler
|
||||
/**
 * p5 tap/click handler: retarget the unicorn toward the pointer and
 * fire a pulse-ring effect at that spot.
 */
function mousePressed() {
  const tapX = mouseX;
  const tapY = mouseY;
  targetX = tapX;
  targetY = tapY;
  addPulseAt(tapX, tapY);
}
|
||||
|
||||
/**
 * Seed a handful of slow-drifting hint particles so the scene is not
 * empty before the first tap.
 */
function addTapHint() {
  let remaining = 5;
  while (remaining-- > 0) {
    particles.push({
      x: random(width),
      y: random(height),
      vx: random(-0.5, 0.5),
      vy: random(-0.5, 0.5),
      life: 200,
      color: 'rgba(233, 69, 96, 0.5)',
      size: 3,
    });
  }
}
|
||||
|
||||
/**
 * Spawn an expanding 12-spoke ring of particles at (x, y) — visual
 * feedback for a tap/click.
 */
function addPulseAt(x, y) {
  const SPOKES = 12;
  for (let s = 0; s < SPOKES; s++) {
    const theta = (TWO_PI / SPOKES) * s;
    particles.push({
      x: x,
      y: y,
      vx: cos(theta) * 3,
      vy: sin(theta) * 3,
      life: 30,
      color: 'rgba(233, 69, 96, 0.7)',
      size: 3,
    });
  }
}
|
||||
32
luna/style.css
Normal file
32
luna/style.css
Normal file
@@ -0,0 +1,32 @@
|
||||
body {
|
||||
margin: 0;
|
||||
overflow: hidden;
|
||||
background: linear-gradient(to bottom, #1a1a2e, #16213e, #0f3460);
|
||||
font-family: 'Courier New', monospace;
|
||||
color: #e94560;
|
||||
}
|
||||
|
||||
#luna-container {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100vw;
|
||||
height: 100vh;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
#hud {
|
||||
position: fixed;
|
||||
top: 10px;
|
||||
left: 10px;
|
||||
background: rgba(0, 0, 0, 0.6);
|
||||
padding: 8px 12px;
|
||||
border-radius: 4px;
|
||||
font-size: 14px;
|
||||
z-index: 100;
|
||||
border: 1px solid #e94560;
|
||||
}
|
||||
|
||||
#score { font-weight: bold; }
|
||||
128
specs/fleet-operator-incentives.md
Normal file
128
specs/fleet-operator-incentives.md
Normal file
@@ -0,0 +1,128 @@
|
||||
# Fleet Operator Incentives & Partner Program
|
||||
*Epic IV — Human Capital & Incentives (Mogul Influence roadmap steps XII, XIII, XV)*
|
||||
|
||||
## Operator Role Definition
|
||||
### Primary Responsibilities
|
||||
- Deploy and maintain sovereign AI agent fleets on VPS nodes
|
||||
- Monitor fleet health, uptime, and performance metrics
|
||||
- Execute dispatched tasks from the Timmy Foundation (burn sessions, cron jobs, PR merges)
|
||||
- Maintain fleet identity registry and rotate credentials per security policy
|
||||
- Report operational metrics weekly (uptime %, completed tasks, resource usage)
|
||||
|
||||
### Qualifications
|
||||
- Linux system administration (systemd, ssh, git, basic networking)
|
||||
- Familiarity with AI agent frameworks (Hermes Agent preferred)
|
||||
- Reliable VPS infrastructure (minimum: 2 vCPU, 4GB RAM, 50GB SSD)
|
||||
- Stable internet connection with <50ms latency to foundation services
|
||||
|
||||
## Compensation Model
|
||||
### Base Rate
|
||||
- **$150/month** per operator for up to 5 VPS nodes managed
|
||||
- Additional $25/month per node beyond 5 (max 10 nodes per operator)
|
||||
|
||||
### Performance Bonuses
|
||||
| Metric | Target | Bonus |
|
||||
|--------|---------|-------|
|
||||
| Fleet uptime | >99.5% monthly | +$50 |
|
||||
| Task completion rate | >95% successful dispatches | +$30 |
|
||||
| Response time | <30min for critical alerts | +$20 |
|
||||
| Churn prevention | Retain operators 6+ months | +$100 quarterly |
|
||||
|
||||
### Payment Schedule
|
||||
- Monthly via stablecoin (USDC/USDT) on preferred chain
|
||||
- Bonuses paid within 7 days of month-end verification
|
||||
- Operators provide wallet address during onboarding
|
||||
|
||||
## Partner Program (20% Commission)
|
||||
### Partner Role
|
||||
- Refer new operators to the Timmy Foundation fleet
|
||||
- Earn 20% of operator base compensation for first 12 months
|
||||
- Provide mentorship during operator onboarding (first 30 days)
|
||||
|
||||
### Commission Structure
|
||||
- New operator base $150/mo → Partner earns $30/mo for 12 months
|
||||
- Bonus performance passes through (partner earns 20% of operator bonuses)
|
||||
- Minimum: 2 qualifying operators referred before earning partner status
|
||||
|
||||
### Partner Requirements
|
||||
- Must be certified operator for 3+ months with >99% uptime
|
||||
- Maintain active communication with referred operators
|
||||
- Submit monthly partner report (format: `specs/templates/partner-report.md`)
|
||||
|
||||
## Quality Standards
|
||||
### Operational Standards
|
||||
- [ ] Fleet uptime ≥99.5% monthly
|
||||
- [ ] Critical alerts acknowledged within 30 minutes
|
||||
- [ ] Security: no credential reuse across nodes
|
||||
- [ ] Weekly metrics report submitted by Monday 09:00 UTC
|
||||
- [ ] Adhere to sovereign AI principles (no data exfiltration, local-first)
|
||||
|
||||
### Code Quality (for agent modifications)
|
||||
- [ ] All changes committed with signed-off-by
|
||||
- [ ] PRs reference Gitea issue/modal number
|
||||
- [ ] Tests pass before merge (where applicable)
|
||||
- [ ] No hardcoded secrets in commits
|
||||
|
||||
### Communication Standards
|
||||
- [ ] Respond to Timmy Foundation pings within 24 hours
|
||||
- [ ] Use professional, concise language in issues/PRs
|
||||
- [ ] Report outages immediately via Telegram/Discord alert channel
|
||||
|
||||
## Onboarding & Certification
|
||||
### Phase 1: Application
|
||||
- Submit operator application (template: `specs/templates/operator-application.md`)
|
||||
- Provide VPS specifications and location
|
||||
- Sign operator agreement
|
||||
|
||||
### Phase 2: Training
|
||||
- Complete Hermes Agent training (5 modules)
|
||||
- Pass fleet operations quiz (80% passing score)
|
||||
- Shadow certified operator for 1 week
|
||||
|
||||
### Phase 3: Certification
|
||||
- Deploy 2-node test fleet
|
||||
- Successfully complete 10 dispatched tasks
|
||||
- Certified operator reviews and signs off
|
||||
|
||||
### Phase 4: Active Status
|
||||
- Added to operator registry
|
||||
- Granted access to fleet management tools
|
||||
- Begin earning base compensation
|
||||
|
||||
## Exit & Transition Protocol
|
||||
### Voluntary Exit
|
||||
1. Submit 30-day notice via Gitea issue label `exit-notice`
|
||||
2. Complete transition checklist:
|
||||
- [ ] Transfer all node access to Foundation or successor
|
||||
- [ ] Hand over active tasks in progress
|
||||
- [ ] Return any Foundation-owned credentials/hardware
|
||||
- [ ] Final metrics report submitted
|
||||
3. Receive exit payment within 7 days
|
||||
|
||||
### Involuntary Termination (for cause)
|
||||
- Repeated uptime <97% (3 consecutive months)
|
||||
- Security breach or credential exposure
|
||||
- Violation of sovereign AI principles
|
||||
- Unresponsive >72 hours without prior notice
|
||||
|
||||
Terminated operators:
|
||||
- Access revoked immediately
|
||||
- Final payment pro-rated to last active day
|
||||
- May reapply after 6 months with improvement plan
|
||||
|
||||
### Succession Planning
|
||||
- Each operator mentors 1 junior operator within first 6 months
|
||||
- Documentation of all processes in `specs/fleet-ops-runbook.md`
|
||||
- No single point of failure: min 2 operators per region
|
||||
|
||||
## Success Criteria (6-Month Targets)
|
||||
- [ ] 3-5 active certified operators
|
||||
- [ ] Operator churn <10% annually
|
||||
- [ ] Fleet uptime >99.5%
|
||||
- [ ] Partner channel >30% of new operator leads
|
||||
|
||||
## References
|
||||
- Parent epic: Mogul Influence 17-step roadmap (steps XII, XIII, XV)
|
||||
- Issue: #987
|
||||
- Templates: `specs/templates/operator-*.md`
|
||||
- Runbook: `specs/fleet-ops-runbook.md` (future)
|
||||
59
specs/fleet-ops-runbook.md
Normal file
59
specs/fleet-ops-runbook.md
Normal file
@@ -0,0 +1,59 @@
|
||||
# Fleet Operations Runbook
|
||||
*Standard operating procedures for Timmy Foundation fleet operators*
|
||||
|
||||
## Daily Checklist
|
||||
- [ ] Check fleet health: `tmux list-sessions` (should show BURN, BURN2, FORGE active)
|
||||
- [ ] Verify gateway running: `systemctl status ai.hermes.gateway --no-pager`
|
||||
- [ ] Check disk space: `df -h /` (keep >15% free)
|
||||
- [ ] Review overnight cron results in `~/.hermes/cron/jobs/`
|
||||
|
||||
## Weekly Tasks
|
||||
- [ ] Generate fleet metrics report (`scripts/fleet-metrics.sh`)
|
||||
- [ ] Rotate any expired credentials (check `~/.hermes/fleet-dispatch-state.json`)
|
||||
- [ ] Review open PRs in Timmy Foundation repos
|
||||
- [ ] Submit weekly report by Monday 09:00 UTC
|
||||
|
||||
## Alert Response Protocol
|
||||
### Critical (respond <30 min)
|
||||
1. Gateway down: `sudo systemctl restart ai.hermes.gateway`
|
||||
2. Disk >90% full: `scripts/cleanup-disk.sh`
|
||||
3. Fleet dispatch failing: check `/tmp/hermes/dispatch-queue.json`
|
||||
|
||||
### Warning (respond <4 hours)
|
||||
1. Uptime <99.5%: investigate tmux panes with `tmux attach -t BURN`
|
||||
2. Failed cron jobs: check logs in `~/.hermes/cron/jobs/`
|
||||
3. Agent loop errors: review session transcripts
|
||||
|
||||
## Common Fixes
|
||||
### Restart stuck tmux pane
|
||||
```bash
|
||||
tmux send-keys -t BURN:0 C-c
|
||||
tmux send-keys -t BURN:0 "hermes chat --yolo" Enter
|
||||
```
|
||||
|
||||
### Clear dispatch queue
|
||||
```bash
|
||||
rm /tmp/hermes/dispatch-queue.json
|
||||
# Watchdog will recreate on next cycle
|
||||
```
|
||||
|
||||
### Update hermes-agent
|
||||
```bash
|
||||
cd ~/hermes-agent && git pull origin main && pip install -e ".[all]"
|
||||
```
|
||||
|
||||
## Emergency Escalation
|
||||
- **Telegram**: @Rockachopa (primary)
|
||||
- **Gitea Issue**: label `operator-alert` + mention @Rockachopa
|
||||
- **Discord**: #fleet-ops-alerts channel
|
||||
|
||||
## Security Rules
|
||||
- Never share VPS SSH keys
|
||||
- Never commit credentials to git
|
||||
- Rotate tokens every 90 days
|
||||
- Report suspicious activity immediately
|
||||
|
||||
## Contact
|
||||
- **Operator Handbook**: `specs/fleet-operator-incentives.md`
|
||||
- **Templates**: `specs/templates/operator-*.md`
|
||||
- **Foundation Forge**: https://forge.alexanderwhitestone.com/Timmy_Foundation
|
||||
44
specs/templates/operator-application.md
Normal file
44
specs/templates/operator-application.md
Normal file
@@ -0,0 +1,44 @@
|
||||
# Fleet Operator Application
|
||||
*Submit completed form as a new Gitea issue with label `operator-application`*
|
||||
|
||||
## Personal Information
|
||||
- **Name / Handle**:
|
||||
- **Contact Email**:
|
||||
- **Telegram/Discord Handle**:
|
||||
- **Wallet Address (USDC/USDT)**:
|
||||
- **Timezone**:
|
||||
|
||||
## Infrastructure
|
||||
- **VPS Provider**: (e.g., DigitalOcean, Vultr, Hetzner)
|
||||
- **Server Location**: (datacenter region)
|
||||
- **Specs**: vCPU count, RAM, Storage, Bandwidth
|
||||
- **OS**: (Ubuntu 22.04 LTS preferred)
|
||||
- **Static IP**: Yes / No
|
||||
|
||||
## Experience
|
||||
- [ ] Linux system administration (2+ years)
|
||||
- [ ] Git / GitHub / Gitea usage
|
||||
- [ ] Docker / container orchestration
|
||||
- [ ] AI agent frameworks (Hermes, OpenAI, etc.)
|
||||
- [ ] Prior VPS fleet management
|
||||
|
||||
### Relevant Experience (describe)
|
||||
*Briefly describe your background with fleet ops, sysadmin, or AI agents:*
|
||||
|
||||
## Commitment
|
||||
- **Hours per week available**:
|
||||
- **Can maintain 99.5% uptime?** Yes / No
|
||||
- **Agree to 30-day notice for exit?** Yes / No
|
||||
- **Agree to sovereign AI principles (no data exfiltration)?** Yes / No
|
||||
|
||||
## References
|
||||
- GitHub/Gitea username:
|
||||
- Any prior work with Timmy Foundation? (link issues/PRs)
|
||||
|
||||
## Acknowledgment
|
||||
I understand I will start at $150/month base rate, with bonuses available for performance. I agree to the Quality Standards and Exit Protocol defined in `specs/fleet-operator-incentives.md`.
|
||||
|
||||
**Signature** (type name): _________________ **Date**: _________
|
||||
|
||||
---
|
||||
*Send completed application to: https://forge.alexanderwhitestone.com/Timmy_Foundation/timmy-home/issues/new*
|
||||
38
specs/templates/partner-report.md
Normal file
38
specs/templates/partner-report.md
Normal file
@@ -0,0 +1,38 @@
|
||||
# Partner Monthly Report
|
||||
*Submit by the 5th of each month for commission payments*
|
||||
|
||||
## Partner Info
|
||||
- **Partner Name**:
|
||||
- **Month/Year**:
|
||||
- **Wallet Address**:
|
||||
|
||||
## Referred Operators
|
||||
| Operator Handle | Start Date | Monthly Base | Commission (20%) | Status |
|
||||
|----------------|------------|--------------|-------------------|--------|
|
||||
| | | $150 | $30 | active / churned |
|
||||
| | | $150 | $30 | active / churned |
|
||||
| | | $150 | $30 | active / churned |
|
||||
|
||||
**Total Commission Due**: $______
|
||||
|
||||
## Mentorship Log
|
||||
*Confirm you provided mentorship to each referred operator in the first 30 days:*
|
||||
- [ ] Operator 1: mentored (dates: ____ to ____)
|
||||
- [ ] Operator 2: mentored (dates: ____ to ____)
|
||||
- [ ] Operator 3: mentored (dates: ____ to ____)
|
||||
|
||||
## Partner Performance
|
||||
- Total active operators referred:
|
||||
- Average operator uptime this month: ______%
|
||||
- Any operator churn? Yes / No (explain: )
|
||||
|
||||
## Self-Assessment
|
||||
- [ ] I maintained >99% personal fleet uptime
|
||||
- [ ] I responded to Foundation pings within 24 hours
|
||||
- [ ] I submitted this report on time
|
||||
|
||||
## Notes
|
||||
*Any issues, concerns, or operator feedback:*
|
||||
|
||||
---
|
||||
*Submit as comment on your partner Gitea issue or via Telegram to @Rockachopa*
|
||||
@@ -1,54 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Smoke test for load_cap_enforcer.py — validates structure and dry-run path.
|
||||
|
||||
Refs: timmy-home #498
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
# Path to the enforcer under test: <repo-root>/timmy-config/bin/load_cap_enforcer.py
SCRIPT = Path(__file__).parent.parent / "timmy-config" / "bin" / "load_cap_enforcer.py"
|
||||
|
||||
|
||||
def test_script_exists_and_is_executable():
    """The enforcer script must be present on disk and marked executable."""
    script_present = SCRIPT.exists()
    assert script_present, f"Script not found: {SCRIPT}"
    can_execute = os.access(SCRIPT, os.X_OK)
    assert can_execute, "Script not executable"
|
||||
|
||||
|
||||
def test_dry_run_help():
    """--help must exit 0 and advertise the documented CLI surface."""
    proc = subprocess.run(
        [sys.executable, str(SCRIPT), "--help"],
        capture_output=True,
        text=True,
    )
    assert proc.returncode == 0
    for expected in ("--dry-run", "--cap", "Enforce open-issue load cap"):
        assert expected in proc.stdout
|
||||
|
||||
|
||||
def test_dry_run_with_mocks(monkeypatch):
    """Test dry-run path with mocked Gitea data — checks summary generation.

    This is a static structural check: it parses the module source for the
    symbols the acceptance criteria require, without importing or executing
    the module (execution would need a Gitea token / network).  The previous
    version also built an importlib spec/module object that was never used;
    that dead code has been removed.  ``monkeypatch`` is accepted for pytest
    fixture compatibility but is unused.
    """
    source = SCRIPT.read_text()
    assert "fetch_all_open_issues" in source
    assert "build_summary" in source
    assert "unassignment_map" in source
    assert "COMMENT_TEMPLATE" in source
    assert "Unassigned from @{assignee} due to load cap" in source
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Run minimal smoke checks when invoked directly (outside pytest).
    test_script_exists_and_is_executable()
    print("✓ Script exists and is executable")
    test_dry_run_help()
    print("✓ --help works")
    # The monkeypatch fixture is unused by the test, so a plain None stands
    # in for it when running standalone (the old type('obj', ...) stub was
    # needlessly convoluted).
    test_dry_run_with_mocks(None)
    print("✓ Core structure verified")
    print("\nAll smoke tests passed.")
|
||||
|
||||
@@ -1,210 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Open-Load Cap Enforcement — Audit-B3
|
||||
|
||||
Scans multiple repos for open issues, enforces a per-agent open-issue cap,
|
||||
auto-unassigns overflow (oldest first), and posts a summary.
|
||||
|
||||
Acceptance (timmy-home #498):
|
||||
- Lives in timmy-config/bin/load_cap_enforcer.py
|
||||
- Scans timmy-home, timmy-config, the-nexus, hermes-agent
|
||||
- Cap: 25 open issues per agent (configurable)
|
||||
- Unassign oldest overflow, comment on each
|
||||
- Dry-run first, then live; summary posted on parent issue #495
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
# ── Configuration ─────────────────────────────────────────────────────────────
GITEA_BASE = "https://forge.alexanderwhitestone.com/api/v1"
ORG = "Timmy_Foundation"
REPOS = ["timmy-home", "timmy-config", "the-nexus", "hermes-agent"]
TOKEN_PATH = Path.home() / ".config" / "gitea" / "token"
DEFAULT_CAP = 25
# Single braces so str.format(assignee=...) substitutes the agent name.
# The previous doubled form "{{assignee}}" escaped the braces, so posted
# comments contained the literal text "@{assignee}" instead of the handle.
COMMENT_TEMPLATE = "Unassigned from @{assignee} due to load cap. Available for pickup."
|
||||
|
||||
|
||||
def load_token() -> str:
    """Return the Gitea API token.

    Prefers the on-disk token at ~/.config/gitea/token, falls back to the
    GITEA_TOKEN environment variable, and terminates the process if
    neither source is available.
    """
    if TOKEN_PATH.exists():
        return TOKEN_PATH.read_text().strip()
    env_token = os.environ.get("GITEA_TOKEN", "")
    if env_token:
        return env_token
    sys.exit("ERROR: Gitea token not found at ~/.config/gitea/token or GITEA_TOKEN env")
|
||||
|
||||
|
||||
def api(method: str, path: str, token: str, data=None):
    """Issue one Gitea REST call and return ``(parsed_json, http_status)``.

    On HTTPError returns ``(None, status_code)``; on any other failure
    returns ``(None, None)``.  Errors are logged to stderr rather than
    raised so callers can treat failures as soft.
    """
    url = f"{GITEA_BASE}{path}"
    body = json.dumps(data).encode() if data else None
    headers = {"Authorization": f"token {token}"}
    if body:
        headers["Content-Type"] = "application/json"
    request = urllib.request.Request(url, data=body, headers=headers, method=method)
    try:
        with urllib.request.urlopen(request, timeout=30) as resp:
            payload = json.loads(resp.read())
            return payload, resp.status
    except urllib.error.HTTPError as e:
        err = e.read().decode() if e.fp else str(e)
        print(f" API {e.code}: {err}", file=sys.stderr)
        return None, e.code
    except Exception as e:
        print(f" Request error: {e}", file=sys.stderr)
        return None, None
|
||||
|
||||
|
||||
def fetch_all_open_issues(token: str):
    """Collect every open issue across all configured repos.

    Pages through the Gitea issues endpoint 50 at a time; a non-200
    response or empty page ends pagination for that repo.
    """
    collected = []
    for repo in REPOS:
        page = 1
        while True:
            batch, status = api(
                "GET",
                f"/repos/{ORG}/{repo}/issues?state=open&page={page}&limit=50",
                token,
            )
            if status != 200 or not batch:
                break
            collected.extend(batch)
            if len(batch) < 50:  # short page ⇒ last page
                break
            page += 1
    return collected
|
||||
|
||||
|
||||
def build_summary(by_agent: dict, unassignment_map: dict):
    """Render a plain-text before/after table of per-agent issue counts.

    ``by_agent`` maps agent login → dict with "before" and "after" counts;
    ``unassignment_map`` maps agent login → list of unassigned issues.
    Agents are listed alphabetically.
    """
    rows = ["Agent | Before | After | Unassigned Count", "-" * 50]
    for agent in sorted(by_agent):
        counts = by_agent[agent]
        removed = len(unassignment_map.get(agent, []))
        rows.append(f"@{agent} | {counts['before']} | {counts['after']} | {removed}")
    return "\n".join(rows)
|
||||
|
||||
|
||||
def main():
    """CLI entry point: scan repos, find agents over the open-issue cap,
    unassign their oldest overflow issues (live mode only), and emit a
    before/after summary.

    Returns 0 on completion.  Exits early via sys.exit inside load_token()
    when no Gitea token is available.
    """
    parser = argparse.ArgumentParser(description="Enforce open-issue load cap per agent")
    parser.add_argument("--dry-run", action="store_true", help="Report without making changes")
    parser.add_argument("--cap", type=int, default=DEFAULT_CAP, help=f"Max open issues per agent (default: {DEFAULT_CAP})")
    parser.add_argument("--output", type=str, default=None, help="Write summary to file")
    parser.add_argument("--comment-on", type=int, default=None, help="Post summary as comment on timmy-home issue N")
    args = parser.parse_args()

    token = load_token()
    print(f"Fetching open issues from {', '.join(REPOS)} ...")
    issues = fetch_all_open_issues(token)
    print(f"Fetched {len(issues)} open issues.")

    # Group by assignee — an issue with several assignees counts once per
    # assignee, against each of their caps.
    by_agent = defaultdict(lambda: {"before": 0, "issues": []})
    for iss in issues:
        for a in (iss.get("assignees") or []):
            login = a.get("login")
            if login:
                by_agent[login]["issues"].append(iss)
                by_agent[login]["before"] += 1

    print(f"\nAgents with open issues: {list(by_agent.keys())}")
    for agent, d in sorted(by_agent.items()):
        print(f" @{agent}: {d['before']} issues")

    # Identify overflow: oldest issues (sorted by created_at — ISO-8601, so
    # a plain string sort is chronological) are unassigned first, keeping
    # the agent's newest `cap` issues assigned.
    unassignment_map = defaultdict(list)
    for agent, d in by_agent.items():
        count = d["before"]
        if count > args.cap:
            overflow = count - args.cap
            issues_sorted = sorted(d["issues"], key=lambda i: i.get("created_at", ""))
            unassignment_map[agent] = issues_sorted[:overflow]
            print(f"\n@{agent} exceeds cap ({count} > {args.cap}); will unassign {overflow} oldest issue(s):")
            for iss in issues_sorted[:overflow]:
                print(f" - #{iss['number']}: {iss.get('title', '')[:50]}")

    # Dry-run: just show summary and exit
    if args.dry_run:
        print("\n=== DRY RUN — no changes made ===")
        # For dry-run, after = before (no changes)
        for agent in by_agent:
            by_agent[agent]["after"] = by_agent[agent]["before"]
        summary = build_summary(by_agent, unassignment_map)
        print("\n" + summary)
        if args.output:
            Path(args.output).write_text(summary)
            print(f"\nSummary written to {args.output}")
        return 0

    # LIVE: perform unassignments and comments (concurrent)
    print("\n=== LIVE RUN — executing ===")
    from concurrent.futures import ThreadPoolExecutor, as_completed
    import threading
    lock = threading.Lock()  # guards `completed` and keeps progress prints un-interleaved
    tasks = []
    # Flatten per-agent overflow into (agent, issue#, repo, issue) work
    # items.  The repo is recovered from the issue's html_url; if no
    # configured repo matches, falls back to REPOS[0].
    for agent, issues_to_unassign in unassignment_map.items():
        for iss in issues_to_unassign:
            issue_num = iss["number"]
            repo_name = next(
                (r for r in REPOS if f"/{r}/issues/" in iss.get("html_url", "")), REPOS[0]
            )
            tasks.append((agent, issue_num, repo_name, iss))
    print(f"Total unassignment tasks: {len(tasks)}")
    def do_task(agent, issue_num, repo_name, iss):
        # Worker: clear assignees, then leave an explanatory comment.
        # Returns (agent, issue#, repo, ok, message) for progress reporting.
        # Unassign
        _, status1 = api("PATCH", f"/repos/{ORG}/{repo_name}/issues/{issue_num}", token, {"assignees": []})
        if status1 not in (200, 201, 204):
            return (agent, issue_num, repo_name, False, f"unassign HTTP {status1}")
        # Comment
        comment_body = COMMENT_TEMPLATE.format(assignee=agent)
        _, status2 = api("POST", f"/repos/{ORG}/{repo_name}/issues/{issue_num}/comments", token, {"body": comment_body})
        if status2 not in (200, 201):
            return (agent, issue_num, repo_name, True, f"unassigned but comment HTTP {status2}")
        return (agent, issue_num, repo_name, True, "OK")
    completed = 0
    with ThreadPoolExecutor(max_workers=12) as executor:
        futures = [executor.submit(do_task, a, n, r, i) for (a, n, r, i) in tasks]
        for fut in as_completed(futures):
            agent, num, repo, ok, msg = fut.result()
            with lock:
                completed += 1
                if completed % 50 == 0:
                    print(f" Progress: {completed}/{len(tasks)}")
                if ok:
                    print(f" ✓ #{num} ({repo})")
                else:
                    print(f" ✗ #{num} ({repo}): {msg}")

    # Recompute after counts for summary — re-fetch from the server so the
    # "after" column reflects actual state, not assumed success.
    print("\nRecomputing after counts ...")
    after_issues = fetch_all_open_issues(token)
    by_agent_after = defaultdict(int)
    for iss in after_issues:
        for a in (iss.get("assignees") or []):
            by_agent_after[a.get("login")] += 1
    for agent in by_agent:
        by_agent[agent]["after"] = by_agent_after.get(agent, 0)

    summary = build_summary(by_agent, unassignment_map)
    print("\n=== SUMMARY ===")
    print(summary)

    if args.output:
        Path(args.output).write_text(summary)
        print(f"Summary written to {args.output}")

    if args.comment_on:
        body = f"Open-load cap enforcement run (cap={args.cap}):\n\n```\n{summary}\n```"
        _, status = api("POST", f"/repos/{ORG}/timmy-home/issues/{args.comment_on}/comments", token, {"body": body})
        if status in (200, 201):
            print(f"\nSummary posted as comment on timmy-home issue #{args.comment_on}")
        else:
            print(f"\nWARNING: failed to post comment (HTTP {status})")

    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate main()'s return value (0 on success) as the process exit code.
    sys.exit(main())
|
||||
Reference in New Issue
Block a user