Compare commits
1 Commits
sprint/iss
...
step35/666
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7bff0d5576 |
@@ -1,48 +0,0 @@
|
||||
# LUNA-1: Pink Unicorn Game — Project Scaffolding
|
||||
|
||||
Starter project for Mackenzie's Pink Unicorn Game built with **p5.js 1.9.0**.
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
cd luna
|
||||
python3 -m http.server 8080
|
||||
# Visit http://localhost:8080
|
||||
```
|
||||
|
||||
Or simply open `luna/index.html` directly in a browser.
|
||||
|
||||
## Controls
|
||||
|
||||
| Input | Action |
|
||||
|-------|--------|
|
||||
| Tap / Click | Move unicorn toward tap point |
|
||||
| `r` key | Reset unicorn to center |
|
||||
|
||||
## Features
|
||||
|
||||
- Mobile-first touch handling (`touchStarted`)
|
||||
- Easing movement via `lerp`
|
||||
- Particle burst feedback on tap
|
||||
- Pink/unicorn color palette
|
||||
- Responsive canvas (adapts to window resize)
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
luna/
|
||||
├── index.html # p5.js CDN import + canvas container
|
||||
├── sketch.js # Main game logic and rendering
|
||||
├── style.css # Pink/unicorn theme, responsive layout
|
||||
└── README.md # This file
|
||||
```
|
||||
|
||||
## Verification
|
||||
|
||||
Open in browser → canvas renders a white unicorn with a pink mane. Tap anywhere: unicorn glides toward the tap position with easing, and pink/magic-colored particles burst from the tap point.
|
||||
|
||||
## Technical Notes
|
||||
|
||||
- p5.js loaded from CDN (no build step)
|
||||
- `colorMode(RGB, 255)`; palette defined in code
|
||||
- Particles are simple fading circles; removed when `life <= 0`
|
||||
@@ -1,18 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>LUNA-3: Simple World — Floating Islands</title>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/1.9.0/p5.min.js"></script>
|
||||
<link rel="stylesheet" href="style.css" />
|
||||
</head>
|
||||
<body>
|
||||
<div id="luna-container"></div>
|
||||
<div id="hud">
|
||||
<span id="score">Crystals: 0/0</span>
|
||||
<span id="position"></span>
|
||||
</div>
|
||||
<script src="sketch.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
289
luna/sketch.js
289
luna/sketch.js
@@ -1,289 +0,0 @@
|
||||
/**
|
||||
* LUNA-3: Simple World — Floating Islands & Collectible Crystals
|
||||
* Builds on LUNA-1 scaffold (unicorn tap-follow) + LUNA-2 actions
|
||||
*
|
||||
* NEW: Floating platforms + collectible crystals with particle bursts
|
||||
*/
|
||||
|
||||
// ---------------------------------------------------------------------------
// Module-level game state.
//
// IMPORTANT: this code runs at script-parse time, BEFORE p5 (global mode)
// attaches its helpers (`random`, `floor`, ...) to `window` — p5 binds them
// when the sketch instance starts, after this script has executed. The
// original `floor(random(2))` therefore threw a ReferenceError on load.
// Use Math.* for any randomness needed during module initialization.
// ---------------------------------------------------------------------------

let particles = [];        // active particle effects (bursts, hints, pulses)
let unicornX, unicornY;    // current unicorn position (set in setup)
let targetX, targetY;      // position the unicorn eases toward

// Platforms: floating islands at various heights with horizontal ranges
const islands = [
  { x: 100, y: 350, w: 150, h: 20, color: [100, 200, 150] }, // left island
  { x: 350, y: 280, w: 120, h: 20, color: [120, 180, 200] }, // middle-high island
  { x: 550, y: 320, w: 140, h: 20, color: [200, 180, 100] }, // right island
  { x: 200, y: 180, w: 180, h: 20, color: [180, 140, 200] }, // top-left island
  { x: 500, y: 120, w: 100, h: 20, color: [140, 220, 180] }, // top-right island
];

// Collectible crystals scattered on the islands (2–3 per island, near center).
const crystals = [];
islands.forEach((island, i) => {
  const count = 2 + Math.floor(Math.random() * 2); // 2 or 3
  for (let j = 0; j < count; j++) {
    crystals.push({
      x: island.x + 30 + Math.random() * (island.w - 60),
      y: island.y - 30 - Math.random() * 20,
      size: 8 + Math.random() * 6,
      hue: 280 + Math.random() * 60, // pink/purple hue range [280, 340)
      collected: false,
      islandIndex: i,
    });
  }
});

let collectedCount = 0;
const TOTAL_CRYSTALS = crystals.length;
|
||||
|
||||
// Pink/unicorn palette
|
||||
// Pink/unicorn color palette (RGB triples, 0–255 per channel).
const PALETTE = {
  background: [255, 210, 230], // light pink; draw() paints a gradient over it
  unicorn: [255, 182, 193],    // body: pale pink
  horn: [255, 215, 0],         // gold
  mane: [255, 105, 180],       // hot pink
  eye: [255, 20, 147],         // deep pink
  sparkle: [255, 105, 180],    // trail sparkles match the mane
  island: [100, 200, 150],     // default island green
};
|
||||
|
||||
/**
 * p5 lifecycle: create the canvas, place the unicorn, seed hint particles.
 * Fix: removed the unused `container` local — `canvas.parent()` takes the
 * element id as a string, so the `getElementById` lookup was dead code.
 */
function setup() {
  const canvas = createCanvas(600, 500);
  canvas.parent('luna-container');
  // Start near the bottom center with no pending movement (target == position).
  unicornX = width / 2;
  unicornY = height - 60;
  targetX = unicornX;
  targetY = unicornY;
  noStroke();
  addTapHint(); // pre-spawn floating "tap me" particles
}
|
||||
|
||||
/**
 * p5 lifecycle: renders one frame.
 * Order matters for p5's stateful stroke/fill calls: sky gradient, islands,
 * crystals, unicorn movement + render, crystal pickup, particles, HUD.
 */
function draw() {
  // Gradient sky: blend #1a1a2e (top) toward #0f3460 (bottom), one scanline
  // per pixel row.
  for (let row = 0; row < height; row++) {
    const f = row / height;
    stroke(lerp(26, 15, f), lerp(26, 52, f), lerp(46, 96, f));
    line(0, row, width, row);
  }

  // Floating islands: drop shadow, then body, then a soft top highlight.
  for (const isle of islands) {
    push();
    fill(0, 0, 0, 40);
    ellipse(isle.x + isle.w / 2 + 5, isle.y + 5, isle.w + 10, isle.h + 6);
    fill(isle.color[0], isle.color[1], isle.color[2]);
    ellipse(isle.x + isle.w / 2, isle.y, isle.w, isle.h);
    fill(255, 255, 255, 60);
    ellipse(isle.x + isle.w / 2, isle.y - isle.h / 3, isle.w * 0.6, isle.h * 0.3);
    pop();
  }

  // Uncollected crystals: glow aura, diamond body, inner sparkle.
  for (const c of crystals) {
    if (c.collected) continue;
    push();
    translate(c.x, c.y);
    const glow = color(`hsla(${c.hue}, 80%, 70%, 0.4)`);
    noStroke();
    fill(glow);
    ellipse(0, 0, c.size * 2.2, c.size * 2.2);
    fill(color(`hsl(${c.hue}, 90%, 75%)`));
    beginShape();
    vertex(0, -c.size);
    vertex(c.size * 0.6, 0);
    vertex(0, c.size);
    vertex(-c.size * 0.6, 0);
    endShape(CLOSE);
    fill(255, 255, 255, 180);
    ellipse(0, 0, c.size * 0.5, c.size * 0.5);
    pop();
  }

  // Ease the unicorn toward the target, then clamp it on-screen.
  unicornX = constrain(lerp(unicornX, targetX, 0.08), 40, width - 40);
  unicornY = constrain(lerp(unicornY, targetY, 0.08), 40, height - 40);

  drawSparkles();
  drawUnicorn(unicornX, unicornY);

  // Collect any crystal within 35px of the unicorn.
  for (const c of crystals) {
    if (!c.collected && dist(unicornX, unicornY, c.x, c.y) < 35) {
      c.collected = true;
      collectedCount++;
      createCollectionBurst(c.x, c.y, c.hue);
    }
  }

  updateParticles();

  // Refresh the HUD readouts.
  const scoreEl = document.getElementById('score');
  const posEl = document.getElementById('position');
  scoreEl.textContent = `Crystals: ${collectedCount}/${TOTAL_CRYSTALS}`;
  posEl.textContent = `(${floor(unicornX)}, ${floor(unicornY)})`;
}
|
||||
|
||||
/**
 * Renders the unicorn centered at (x, y): body, head, mane, horn, eye, legs.
 * Call order is significant — later shapes draw over earlier ones.
 */
function drawUnicorn(x, y) {
  push();
  translate(x, y);

  noStroke();
  fill(PALETTE.unicorn);
  ellipse(0, 0, 60, 40);    // body
  ellipse(30, -20, 30, 25); // head

  // Five overlapping mane puffs across the back.
  fill(PALETTE.mane);
  for (let k = 0; k < 5; k++) {
    ellipse(k * 12 - 10, -50, 12, 25);
  }

  // Golden horn, tilted back slightly from the head.
  push();
  translate(30, -35);
  rotate(-PI / 6);
  fill(PALETTE.horn);
  triangle(0, 0, -8, -35, 8, -35);
  pop();

  // Eye.
  fill(PALETTE.eye);
  ellipse(38, -22, 8, 8);

  // Two simple legs, a shade darker than the body (grayscale stroke).
  stroke(PALETTE.unicorn[0] - 40);
  strokeWeight(6);
  line(-20, 20, -20, 45);
  line(20, 20, 20, 45);

  pop();
}
|
||||
|
||||
/**
 * Emits a few pink points around the unicorn — but only while it is still
 * noticeably far from its target (i.e. visibly moving).
 */
function drawSparkles() {
  const moving = abs(targetX - unicornX) > 1 || abs(targetY - unicornY) > 1;
  if (!moving) return; // idle: no sparkle trail

  for (let n = 0; n < 3; n++) {
    const theta = random(TWO_PI);
    const radius = random(20, 50);
    stroke(PALETTE.sparkle[0], PALETTE.sparkle[1], PALETTE.sparkle[2], 150);
    strokeWeight(2);
    point(unicornX + cos(theta) * radius, unicornY + sin(theta) * radius);
  }
}
|
||||
|
||||
/**
 * Spawns particle effects at a collected crystal's position:
 * a 20-particle radial burst tinted near the crystal's hue, plus a
 * shorter-lived golden accent ring of 12 particles.
 */
function createCollectionBurst(x, y, hue) {
  // Radial burst: random directions and speeds.
  for (let n = 0; n < 20; n++) {
    const theta = random(TWO_PI);
    const v = random(2, 6);
    particles.push({
      x: x,
      y: y,
      vx: cos(theta) * v,
      vy: sin(theta) * v,
      life: 60,
      color: `hsl(${hue + random(-20, 20)}, 90%, 70%)`,
      size: random(3, 6),
    });
  }

  // Golden accent ring (fixed speed, shorter life).
  for (let n = 0; n < 12; n++) {
    const theta = random(TWO_PI);
    particles.push({
      x: x,
      y: y,
      vx: cos(theta) * 4,
      vy: sin(theta) * 4,
      life: 40,
      color: 'rgba(255, 215, 0, 0.9)',
      size: 4,
    });
  }
}
|
||||
|
||||
/**
 * Advances, ages, damps, and renders every particle; expired particles
 * (life <= 0) are removed. Iterates backwards so splicing does not skip
 * the neighbouring element.
 */
function updateParticles() {
  for (let i = particles.length - 1; i >= 0; i--) {
    const spark = particles[i];
    spark.x += spark.vx;
    spark.y += spark.vy;
    spark.vy += 0.1; // gravity
    spark.life--;
    spark.vx *= 0.95; // air drag
    spark.vy *= 0.95;
    if (spark.life <= 0) {
      particles.splice(i, 1);
    } else {
      push();
      stroke(spark.color);
      strokeWeight(spark.size);
      point(spark.x, spark.y);
      pop();
    }
  }
}
|
||||
|
||||
// Tap/click handler
|
||||
/**
 * p5 tap/click handler: retargets the unicorn to the pointer position and
 * fires a visual ring pulse there.
 */
function mousePressed() {
  const tapX = mouseX;
  const tapY = mouseY;
  targetX = tapX;
  targetY = tapY;
  addPulseAt(tapX, tapY);
}
|
||||
|
||||
/**
 * Seeds five slow-drifting translucent particles scattered across the
 * canvas as a subtle "tap anywhere" hint (called once from setup).
 */
function addTapHint() {
  let remaining = 5;
  while (remaining-- > 0) {
    particles.push({
      x: random(width),
      y: random(height),
      vx: random(-0.5, 0.5),
      vy: random(-0.5, 0.5),
      life: 200,
      color: 'rgba(233, 69, 96, 0.5)',
      size: 3,
    });
  }
}
|
||||
|
||||
/**
 * Spawns an expanding ring of 12 evenly-spaced particles centered on
 * (x, y) — the tap feedback pulse.
 */
function addPulseAt(x, y) {
  const SPOKES = 12;
  for (let k = 0; k < SPOKES; k++) {
    const theta = (TWO_PI / SPOKES) * k;
    particles.push({
      x: x,
      y: y,
      vx: cos(theta) * 3,
      vy: sin(theta) * 3,
      life: 30,
      color: 'rgba(233, 69, 96, 0.7)',
      size: 3,
    });
  }
}
|
||||
@@ -1,32 +0,0 @@
|
||||
body {
|
||||
margin: 0;
|
||||
overflow: hidden;
|
||||
background: linear-gradient(to bottom, #1a1a2e, #16213e, #0f3460);
|
||||
font-family: 'Courier New', monospace;
|
||||
color: #e94560;
|
||||
}
|
||||
|
||||
#luna-container {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100vw;
|
||||
height: 100vh;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
#hud {
|
||||
position: fixed;
|
||||
top: 10px;
|
||||
left: 10px;
|
||||
background: rgba(0, 0, 0, 0.6);
|
||||
padding: 8px 12px;
|
||||
border-radius: 4px;
|
||||
font-size: 14px;
|
||||
z-index: 100;
|
||||
border: 1px solid #e94560;
|
||||
}
|
||||
|
||||
#score { font-weight: bold; }
|
||||
@@ -1,20 +1,12 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
genome_analyzer.py — Generate a GENOME.md from a codebase.
|
||||
genome_analyzer.py — Generate a GENOME.md from a codebase using the canonical template.
|
||||
|
||||
Scans a repository and produces a structured codebase genome with:
|
||||
- File counts by type
|
||||
- Architecture overview (directory structure)
|
||||
- Entry points
|
||||
- Test coverage summary
|
||||
Scans a repository and fills in templates/GENOME-template.md with discovered
|
||||
structure, entry points, and test coverage. Manual analysis sections are
|
||||
preserved with "(To be completed...)" placeholders.
|
||||
|
||||
Usage:
|
||||
python3 scripts/genome_analyzer.py /path/to/repo
|
||||
python3 scripts/genome_analyzer.py /path/to/repo --output GENOME.md
|
||||
python3 scripts/genome_analyzer.py /path/to/repo --dry-run
|
||||
|
||||
Part of #666: GENOME.md Template + Single-Repo Analyzer.
|
||||
"""
|
||||
Part of #666: GENOME.md Template + Single-Repo Analyzer."""
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
@@ -23,25 +15,32 @@ from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Tuple
|
||||
|
||||
SKIP_DIRS = {".git", "__pycache__", ".venv", "venv", "node_modules", ".tox", ".pytest_cache", ".DS_Store"}
|
||||
SKIP_DIRS = {".git", "__pycache__", ".venv", "venv", "node_modules",
|
||||
".tox", ".pytest_cache", ".DS_Store", "dist", "build", "coverage"}
|
||||
|
||||
|
||||
def _is_source(p: Path) -> bool:
|
||||
return p.suffix in {".py", ".js", ".ts", ".mjs", ".cjs", ".jsx",
|
||||
".tsx", ".sh"} and not p.name.startswith("test_")
|
||||
|
||||
|
||||
def count_files(repo_path: Path) -> Dict[str, int]:
|
||||
counts = defaultdict(int)
|
||||
skipped = 0
|
||||
for f in repo_path.rglob("*"):
|
||||
if any(part in SKIP_DIRS for part in f.parts):
|
||||
continue
|
||||
if f.is_file():
|
||||
if any(part in SKIP_DIRS for part in f.parts):
|
||||
continue
|
||||
ext = f.suffix or "(no ext)"
|
||||
counts[ext] += 1
|
||||
return dict(sorted(counts.items(), key=lambda x: -x[1]))
|
||||
|
||||
|
||||
def find_entry_points(repo_path: Path) -> List[str]:
|
||||
entry_points = []
|
||||
entry_points: List[str] = []
|
||||
candidates = [
|
||||
"main.py", "app.py", "server.py", "cli.py", "manage.py",
|
||||
"index.html", "index.js", "index.ts",
|
||||
"__main__.py", "index.html", "index.js", "index.ts",
|
||||
"Makefile", "Dockerfile", "docker-compose.yml",
|
||||
"README.md", "deploy.sh", "setup.py", "pyproject.toml",
|
||||
]
|
||||
@@ -53,27 +52,46 @@ def find_entry_points(repo_path: Path) -> List[str]:
|
||||
for f in sorted(scripts_dir.iterdir()):
|
||||
if f.suffix in (".py", ".sh") and not f.name.startswith("test_"):
|
||||
entry_points.append(f"scripts/{f.name}")
|
||||
return entry_points[:15]
|
||||
src_dir = repo_path / "src"
|
||||
if src_dir.is_dir():
|
||||
for f in sorted(src_dir.iterdir()):
|
||||
if f.is_file() and f.suffix == ".py" and not f.name.startswith("test_"):
|
||||
entry_points.append(f"src/{f.name}")
|
||||
top_py = [f.name for f in repo_path.iterdir()
|
||||
if f.is_file() and f.suffix == ".py" and _is_source(f)]
|
||||
entry_points.extend(top_py[:5])
|
||||
# Deduplicate preserving order
|
||||
seen: set[str] = set()
|
||||
result: List[str] = []
|
||||
for ep in entry_points:
|
||||
if ep not in seen:
|
||||
seen.add(ep)
|
||||
result.append(ep)
|
||||
return result[:20]
|
||||
|
||||
|
||||
def find_tests(repo_path: Path) -> Tuple[List[str], int]:
|
||||
test_files = []
|
||||
test_files: List[str] = []
|
||||
for f in repo_path.rglob("*"):
|
||||
if any(part in SKIP_DIRS for part in f.parts):
|
||||
continue
|
||||
if f.is_file() and (f.name.startswith("test_") or f.name.endswith("_test.py") or f.name.endswith("_test.js")):
|
||||
test_files.append(str(f.relative_to(repo_path)))
|
||||
if f.is_file():
|
||||
if any(part in SKIP_DIRS for part in f.parts):
|
||||
continue
|
||||
name = f.name
|
||||
if name.startswith("test_") or name.endswith("_test.py") or name.endswith(".test.js"):
|
||||
test_files.append(str(f.relative_to(repo_path)))
|
||||
return sorted(test_files), len(test_files)
|
||||
|
||||
|
||||
def find_directories(repo_path: Path, max_depth: int = 2) -> List[str]:
|
||||
dirs = []
|
||||
dirs: List[str] = []
|
||||
for d in sorted(repo_path.rglob("*")):
|
||||
if d.is_dir() and len(d.relative_to(repo_path).parts) <= max_depth:
|
||||
if not any(part in SKIP_DIRS for part in d.parts):
|
||||
rel = str(d.relative_to(repo_path))
|
||||
if rel != ".":
|
||||
dirs.append(rel)
|
||||
if d.is_dir():
|
||||
depth = len(d.relative_to(repo_path).parts)
|
||||
if depth <= max_depth:
|
||||
if not any(part in SKIP_DIRS for part in d.parts):
|
||||
rel = str(d.relative_to(repo_path))
|
||||
if rel != "." and rel not in dirs:
|
||||
dirs.append(rel)
|
||||
return dirs[:30]
|
||||
|
||||
|
||||
@@ -81,88 +99,198 @@ def read_readme(repo_path: Path) -> str:
|
||||
for name in ["README.md", "README.rst", "README.txt", "README"]:
|
||||
readme = repo_path / name
|
||||
if readme.exists():
|
||||
lines = readme.read_text(encoding="utf-8", errors="replace").split("\n")
|
||||
para = []
|
||||
started = False
|
||||
for line in lines:
|
||||
if line.startswith("#") and not started:
|
||||
text = readme.read_text(encoding="utf-8", errors="replace")
|
||||
paras: List[str] = []
|
||||
for line in text.splitlines():
|
||||
stripped = line.strip()
|
||||
if stripped.startswith("#"):
|
||||
continue
|
||||
if line.strip():
|
||||
started = True
|
||||
para.append(line.strip())
|
||||
elif started:
|
||||
if stripped:
|
||||
paras.append(stripped)
|
||||
elif paras:
|
||||
break
|
||||
return " ".join(para[:5])
|
||||
return " ".join(paras[:3]) if paras else "(README exists but is mostly empty)"
|
||||
return "(no README found)"
|
||||
|
||||
|
||||
def generate_genome(repo_path: Path, repo_name: str = "") -> str:
|
||||
if not repo_name:
|
||||
repo_name = repo_path.name
|
||||
date = datetime.now(timezone.utc).strftime("%Y-%m-%d")
|
||||
readme_desc = read_readme(repo_path)
|
||||
file_counts = count_files(repo_path)
|
||||
total_files = sum(file_counts.values())
|
||||
entry_points = find_entry_points(repo_path)
|
||||
test_files, test_count = find_tests(repo_path)
|
||||
dirs = find_directories(repo_path)
|
||||
|
||||
lines = [
|
||||
f"# GENOME.md — {repo_name}", "",
|
||||
f"> Codebase analysis generated {date}. {readme_desc[:100]}.", "",
|
||||
"## Project Overview", "",
|
||||
readme_desc, "",
|
||||
f"**{total_files} files** across {len(file_counts)} file types.", "",
|
||||
"## Architecture", "",
|
||||
"```",
|
||||
]
|
||||
for d in dirs[:20]:
|
||||
lines.append(f" {d}/")
|
||||
lines.append("```")
|
||||
lines += ["", "### File Types", "", "| Type | Count |", "|------|-------|"]
|
||||
for ext, count in list(file_counts.items())[:15]:
|
||||
lines.append(f"| {ext} | {count} |")
|
||||
lines += ["", "## Entry Points", ""]
|
||||
for ep in entry_points:
|
||||
lines.append(f"- `{ep}`")
|
||||
lines += ["", "## Test Coverage", "", f"**{test_count} test files** found.", ""]
|
||||
if test_files:
|
||||
for tf in test_files[:10]:
|
||||
lines.append(f"- `{tf}`")
|
||||
if len(test_files) > 10:
|
||||
lines.append(f"- ... and {len(test_files) - 10} more")
|
||||
else:
|
||||
lines.append("No test files found.")
|
||||
lines += ["", "## Security Considerations", "", "(To be filled during analysis)", ""]
|
||||
lines += ["## Design Decisions", "", "(To be filled during analysis)", ""]
|
||||
def _mermaid_diagram(repo_name: str, dirs: List[str], entry_points: List[str]) -> str:
|
||||
lines = ["graph TD", f' root["{repo_name} (repo root)"]']
|
||||
for d in dirs[:15]:
|
||||
safe = d.replace("/", "_").replace("-", "_")
|
||||
lines.append(f' root --> {safe}["{d}/"]')
|
||||
lines.append("")
|
||||
lines.append(" %% Entry points (leaf nodes)")
|
||||
for ep in entry_points[:10]:
|
||||
safe_ep = ep.replace("/", "_").replace(".", "_").replace("-", "_")
|
||||
parent = ep.split("/")[0] if "/" in ep else "root"
|
||||
parent_safe = parent.replace("/", "_").replace("-", "_")
|
||||
lines.append(f' {parent_safe} --> {safe_ep}["{ep}"]')
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description="Generate GENOME.md from a codebase")
|
||||
parser.add_argument("repo_path", help="Path to repository")
|
||||
parser.add_argument("--output", default="", help="Output file (default: stdout)")
|
||||
parser.add_argument("--name", default="", help="Repository name")
|
||||
parser.add_argument("--dry-run", action="store_true", help="Print stats only")
|
||||
def _bullet_list(items: List[str]) -> str:
|
||||
if not items:
|
||||
return "(none discovered)"
|
||||
return "\n".join(f"- `{item}`" for item in items[:20])
|
||||
|
||||
|
||||
def _comma_list(items: List[str]) -> str:
|
||||
return ", ".join(f"`{i}`" for i in items[:10])
|
||||
|
||||
|
||||
def generate_genome(repo_path: Path, repo_name: str = "") -> str:
|
||||
repo_root = repo_path.resolve()
|
||||
if not repo_name:
|
||||
repo_name = repo_path.name
|
||||
|
||||
date = datetime.now(timezone.utc).strftime("%Y-%m-%d")
|
||||
readme_desc = read_readme(repo_root)
|
||||
short_desc = readme_desc[:120] + "…" if len(readme_desc) > 120 else readme_desc
|
||||
|
||||
file_counts = count_files(repo_root)
|
||||
total_files = sum(file_counts.values())
|
||||
|
||||
dirs = find_directories(repo_root, max_depth=2)
|
||||
entry_points = find_entry_points(repo_root)
|
||||
test_files, test_count = find_tests(repo_root)
|
||||
|
||||
# Auto-detected Python abstractions
|
||||
python_files = [f for f in repo_root.rglob("*.py")
|
||||
if f.is_file() and not any(p in SKIP_DIRS for p in f.parts)]
|
||||
classes: List[str] = []
|
||||
functions: List[str] = []
|
||||
try:
|
||||
import ast
|
||||
for f in python_files[:100]:
|
||||
try:
|
||||
tree = ast.parse(f.read_text(encoding="utf-8", errors="replace"))
|
||||
for node in ast.walk(tree):
|
||||
if isinstance(node, ast.ClassDef):
|
||||
classes.append(f"{f.relative_to(repo_root)}::{node.name}")
|
||||
elif isinstance(node, ast.FunctionDef) and not node.name.startswith("_"):
|
||||
qual = f"{f.relative_to(repo_root)}::{node.name}"
|
||||
functions.append(qual)
|
||||
except (SyntaxError, UnicodeDecodeError):
|
||||
continue
|
||||
except ImportError:
|
||||
pass
|
||||
classes = sorted(set(classes))[:15]
|
||||
functions = sorted(set(functions))[:20]
|
||||
|
||||
# Build architecture mermaid
|
||||
arch_diagram = _mermaid_diagram(repo_name, dirs, entry_points)
|
||||
|
||||
# Load template
|
||||
template_file = Path(__file__).resolve().parent.parent / "templates" / "GENOME-template.md"
|
||||
|
||||
if template_file.exists():
|
||||
template_text = template_file.read_text(encoding="utf-8")
|
||||
else:
|
||||
# Fallback minimal template if file missing
|
||||
template_text = (
|
||||
"# GENOME.md — {REPO_NAME}\n\n"
|
||||
"> Codebase analysis generated {DATE}. {SHORT_DESCRIPTION}.\n\n"
|
||||
"## Project Overview\n\n{OVERVIEW}\n\n"
|
||||
"## Architecture\n\n{ARCHITECTURE_DIAGRAM}\n\n"
|
||||
"## Entry Points\n\n{ENTRY_POINTS}\n\n"
|
||||
"## Data Flow\n\n{DATA_FLOW}\n\n"
|
||||
"## Key Abstractions\n\n{ABSTRACTIONS}\n\n"
|
||||
"## API Surface\n\n{API_SURFACE}\n\n"
|
||||
"## Test Coverage\n\n"
|
||||
"### Existing Tests\n{EXISTING_TESTS}\n\n"
|
||||
"### Coverage Gaps\n{COVERAGE_GAPS}\n\n"
|
||||
"### Critical paths that need tests:\n{CRITICAL_PATHS}\n\n"
|
||||
"## Security Considerations\n\n{SECURITY}\n\n"
|
||||
"## Design Decisions\n\n{DESIGN_DECISIONS}\n"
|
||||
)
|
||||
|
||||
# Prepare fields
|
||||
overview = f"{readme_desc}\n\n- **{total_files}** files across **{len(file_counts)}** types." + (
|
||||
f"\n- Primary languages: {_comma_list([f'{k}:{v}' for k,v in list(file_counts.items())[:5]])}."
|
||||
)
|
||||
|
||||
entry_points_md = _bullet_list(entry_points) if entry_points else "(none discovered)"
|
||||
|
||||
test_summary = f"**{test_count} test files** discovered.\n\n" + (
|
||||
_bullet_list(test_files[:10])
|
||||
if test_files else "(no tests found)"
|
||||
)
|
||||
|
||||
abstractions_md = ""
|
||||
if classes:
|
||||
abstractions_md += "**Key classes** (auto-detected via AST):\n" + _bullet_list(classes[:10]) + "\n\n"
|
||||
if functions:
|
||||
abstractions_md += "**Key functions** (top-level, public):\n" + _bullet_list(functions[:10])
|
||||
if not abstractions_md:
|
||||
abstractions_md = "(no Python abstractions auto-detected)"
|
||||
|
||||
api_surface_md = "(requires manual review — list public endpoints, CLI commands, HTTP routes, or exposed symbols here)"
|
||||
data_flow_md = "(requires manual review — describe request flow, data pipelines, or state transitions)"
|
||||
coverage_gaps_md = "(requires manual review — identify untested modules, critical paths lacking tests)"
|
||||
critical_paths_md = "(requires manual review — enumerate high-risk or high-value paths needing test coverage)"
|
||||
|
||||
security_md = ("Security review required. Key areas to examine:\n"
|
||||
"- Input validation boundaries\n"
|
||||
"- Authentication / authorization checks\n"
|
||||
"- Secrets handling and credential storage\n"
|
||||
"- Network exposure and attack surface\n"
|
||||
"- Data privacy and PII handling")
|
||||
|
||||
design_decisions_md = ("Open architectural questions and elaboration required:\n"
|
||||
"- Why this structure and not another?\n"
|
||||
"- What constraints shaped current abstractions?\n"
|
||||
"- What trade-offs were accepted and why?\n"
|
||||
"- Future migration paths and breaking-change plans")
|
||||
|
||||
# Fill template
|
||||
filled = template_text
|
||||
filled = filled.replace("{{REPO_NAME}}", repo_name)
|
||||
filled = filled.replace("{{DATE}}", date)
|
||||
filled = filled.replace("{{SHORT_DESCRIPTION}}", short_desc)
|
||||
filled = filled.replace("{{OVERVIEW}}", overview)
|
||||
filled = filled.replace("{{ARCHITECTURE_DIAGRAM}}", arch_diagram)
|
||||
filled = filled.replace("{{ENTRY_POINTS}}", entry_points_md)
|
||||
filled = filled.replace("{{DATA_FLOW}}", data_flow_md)
|
||||
filled = filled.replace("{{ABSTRACTIONS}}", abstractions_md)
|
||||
filled = filled.replace("{{API_SURFACE}}", api_surface_md)
|
||||
filled = filled.replace("{{EXISTING_TESTS}}", test_summary)
|
||||
filled = filled.replace("{{COVERAGE_GAPS}}", coverage_gaps_md)
|
||||
filled = filled.replace("{{CRITICAL_PATHS}}", critical_paths_md)
|
||||
filled = filled.replace("{{SECURITY}}", security_md)
|
||||
filled = filled.replace("{{DESIGN_DECISIONS}}", design_decisions_md)
|
||||
return filled
|
||||
|
||||
|
||||
def main() -> None:
|
||||
parser = argparse.ArgumentParser(description="Generate GENOME.md from a codebase using the canonical template")
|
||||
parser.add_argument("repo_path", help="Path to repository root")
|
||||
parser.add_argument("--output", "-o", default="", help="Write GENOME.md to this path (default: stdout)")
|
||||
parser.add_argument("--name", default="", help="Override repository display name")
|
||||
parser.add_argument("--dry-run", action="store_true", help="Print discovered stats without generating file")
|
||||
args = parser.parse_args()
|
||||
|
||||
repo_path = Path(args.repo_path).resolve()
|
||||
if not repo_path.is_dir():
|
||||
print(f"ERROR: {repo_path} is not a directory", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
repo_name = args.name or repo_path.name
|
||||
|
||||
if args.dry_run:
|
||||
counts = count_files(repo_path)
|
||||
_, test_count = find_tests(repo_path)
|
||||
print(f"Repo: {repo_name}")
|
||||
print(f"Total files: {sum(counts.values())}")
|
||||
print(f"Total files (text): {sum(counts.values())}")
|
||||
print(f"Test files: {test_count}")
|
||||
print(f"Top types: {', '.join(f'{k}={v}' for k,v in list(counts.items())[:5])}")
|
||||
sys.exit(0)
|
||||
|
||||
genome = generate_genome(repo_path, repo_name)
|
||||
|
||||
if args.output:
|
||||
with open(args.output, "w") as f:
|
||||
f.write(genome)
|
||||
print(f"Written: {args.output}")
|
||||
out = Path(args.output)
|
||||
out.write_text(genome, encoding="utf-8")
|
||||
print(f"GENOME.md written: {out}")
|
||||
else:
|
||||
print(genome)
|
||||
|
||||
|
||||
@@ -1,67 +0,0 @@
|
||||
# Fleet Operator Incentives & Partner Program
|
||||
|
||||
## Overview
|
||||
|
||||
This document defines the incentive structure, certification pathway, and operational framework for Fleet Operators within the Timmy ecosystem. It implements Fleet Epic IV - Human Capital & Incentives.
|
||||
|
||||
## Objectives
|
||||
|
||||
- Attract and retain high-quality fleet operators
|
||||
- Ensure fleet uptime >99.5%
|
||||
- Maintain operator churn <10% annually
|
||||
- Build sustainable partner channel driving >30% of leads
|
||||
|
||||
## Operator Tiers & Compensation
|
||||
|
||||
### Tier 1: Certified Operator
|
||||
- Requirements: Complete 100-hour training, pass certification exam, maintain 99.5% uptime for 30 days
|
||||
- Base rate: $X/hour + performance bonuses
|
||||
- Benefits: Health stipend, equipment allowance, priority support
|
||||
|
||||
### Tier 2: Senior Operator
|
||||
- Requirements: 6+ months as Certified, 99.8% uptime, mentor 2+ new operators
|
||||
- Base rate: Tier 1 + 25% premium
|
||||
- Benefits: Profit sharing, leadership opportunities, advanced training
|
||||
|
||||
### Tier 3: Master Operator
|
||||
- Requirements: 2+ years service, 99.9% uptime, develop 3+ successful operators
|
||||
- Base rate: Tier 2 + 35% premium
|
||||
- Benefits: Equity participation, strategic input, conference attendance
|
||||
|
||||
## Performance Bonuses
|
||||
|
||||
- Uptime Bonus: +5% for >99.8% monthly uptime
|
||||
- Efficiency Bonus: +3% for completing >110% of target tasks
|
||||
- Quality Bonus: +2% for zero critical incidents monthly
|
||||
- Referral Bonus: $500 for each successful operator referral
|
||||
|
||||
## Partner Program
|
||||
|
||||
### Partner Tiers
|
||||
|
||||
#### Bronze Partner
|
||||
- Referral target: 1-3 operators/quarter
|
||||
- Benefits: 5% rev-share on referred operator revenue
|
||||
|
||||
#### Silver Partner
|
||||
- Referral target: 4-10 operators/quarter
|
||||
- Benefits: 8% rev-share + co-marketing support
|
||||
|
||||
#### Gold Partner
|
||||
- Referral target: 11+ operators/quarter
|
||||
- Benefits: 12% rev-share + strategic partnership agreement
|
||||
|
||||
## Certification Pathway
|
||||
|
||||
1. **Application** → Submit through operator-application.md template
|
||||
2. **Screening** → Background check, technical assessment
|
||||
3. **Training** → Complete 100-hour Fleet Ops curriculum
|
||||
4. **Certification Exam** → Written + practical components
|
||||
5. **Onboarding** → Shadowing, gradual ramp-up
|
||||
6. **Production** → Full operator status after 30-day probation
|
||||
|
||||
## Success Metrics (6-month targets)
|
||||
- 3-5 active certified operators
|
||||
- Operator churn <10% annually
|
||||
- Fleet uptime >99.5%
|
||||
- Partner channel >30% of leads
|
||||
@@ -1,101 +0,0 @@
|
||||
# Fleet Operations Runbook
|
||||
|
||||
## Purpose
|
||||
|
||||
Standard operating procedures for Fleet Operators to ensure consistent, high-quality service delivery.
|
||||
|
||||
## Daily Operations
|
||||
|
||||
### 1. Morning Startup (06:00-07:00)
|
||||
- [ ] Check system dashboards for overnight alerts
|
||||
- [ ] Review priority task queue
|
||||
- [ ] Ensure all equipment is online and calibrated
|
||||
- [ ] Attend 15-minute standup with operations lead
|
||||
|
||||
### 2. Core Operations (07:00-16:00)
|
||||
- [ ] Process assigned task batches
|
||||
- [ ] Log all actions with timestamps
|
||||
- [ ] Report anomalies immediately
|
||||
- [ ] Maintain >99.5% uptime per the SLA
|
||||
|
||||
### 3. Evening Shutdown (16:00-17:00)
|
||||
- [ ] Complete all in-flight tasks
|
||||
- [ ] Generate daily summary report
|
||||
- [ ] Document any issues or process improvements
|
||||
- [ ] Handoff to night shift (if applicable)
|
||||
|
||||
## Incident Response
|
||||
|
||||
### Severity 1 (System Down)
|
||||
- Notify ops lead immediately
|
||||
- Follow recovery playbook
|
||||
- Document root cause
|
||||
- Escalate if unresolved in 15 minutes
|
||||
|
||||
### Severity 2 (Degraded Performance)
|
||||
- Log incident in tracking system
|
||||
- Begin troubleshooting
|
||||
- Update status every 30 minutes
|
||||
- Resolve within 4 hours
|
||||
|
||||
### Severity 3 (Minor Issue)
|
||||
- Document and schedule for next maintenance window
|
||||
- No immediate escalation required
|
||||
|
||||
## Escalation Matrix
|
||||
|
||||
| Issue Type | First Escalation | Second Escalation | SLA |
|------------|------------------|-------------------|--------|
| Technical  | Senior Operator  | Operations Lead   | 30 min |
| Process    | Team Lead        | Fleet Manager     | 2 hr   |
| Customer   | Support Lead     | Fleet Manager     | 15 min |
|
||||
|
||||
## Communication Channels
|
||||
|
||||
- **Daily Standup**: Zoom 06:45-07:00
|
||||
- **Incidents**: #fleet-ops-alerts (Slack)
|
||||
- **Questions**: #fleet-ops-general (Slack)
|
||||
- **Reports**: Submit daily via the partner-report.md template
|
||||
|
||||
## Quality Standards
|
||||
|
||||
- Task completion accuracy: >99%
|
||||
- Response time to alerts: <5 minutes
|
||||
- Documentation completeness: 100%
|
||||
- Safety incident rate: 0
|
||||
|
||||
## Training & Certification
|
||||
|
||||
See certification pathway in fleet-operator-incentives.md. Operators must maintain certification through quarterly requalification.
|
||||
|
||||
## Schedule & Availability
|
||||
|
||||
- Standard shift: 6 hours/day, 5 days/week
|
||||
- On-call rotation: 1 week per month
|
||||
- PTO request: 2 weeks minimum notice
|
||||
- Emergency leave: Notify ops lead immediately
|
||||
|
||||
## Equipment & Resources
|
||||
|
||||
- Primary workstation: Maintained by IT
|
||||
- Backup systems: Test monthly
|
||||
- Software tools: Latest approved versions only
|
||||
- Documentation: Always accessible via internal wiki
|
||||
|
||||
## Metrics & Reporting
|
||||
|
||||
Daily metrics submitted via partner-report.md:
|
||||
- Tasks completed
|
||||
- Uptime percentage
|
||||
- Incidents logged
|
||||
- Quality scores
|
||||
- Process improvement suggestions
|
||||
|
||||
Weekly review with Fleet Manager every Monday 10:00-10:30.
|
||||
|
||||
## Appendix
|
||||
|
||||
- A: System Architecture Overview
|
||||
- B: Troubleshooting Playbooks
|
||||
- C: Contact Directory
|
||||
- D: Compliance Requirements
|
||||
@@ -1,65 +0,0 @@
|
||||
---
|
||||
# Fleet Operator Application
|
||||
application_date: YYYY-MM-DD
|
||||
candidate_name:
|
||||
---
|
||||
|
||||
## Personal Information
|
||||
|
||||
- Full Name:
|
||||
- Email:
|
||||
- Phone:
|
||||
- Location (City/State/Country):
|
||||
- Time Zone:
|
||||
|
||||
## Professional Background
|
||||
|
||||
### Relevant Experience
|
||||
- Years in operations/technical roles:
|
||||
- Fleet management experience:
|
||||
- Previous certifications:
|
||||
- Equipment familiarity:
|
||||
|
||||
### Technical Skills
|
||||
- [ ] System monitoring
|
||||
- [ ] Incident response
|
||||
- [ ] Documentation
|
||||
- [ ] Team collaboration
|
||||
- [ ] Other (specify):
|
||||
|
||||
## Availability
|
||||
|
||||
- Start date available:
|
||||
- Weekly hours sought:
|
||||
- On-call willingness: [ ] Yes [ ] No
|
||||
- Remote work preference: [ ] Fully remote [ ] Hybrid [ ] On-site
|
||||
|
||||
## Compensation Expectations
|
||||
|
||||
- Desired hourly rate:
|
||||
- Minimum acceptable rate:
|
||||
|
||||
## Why Timmy?
|
||||
|
||||
*(Describe your interest in joining the Timmy Fleet)*
|
||||
|
||||
## Additional Information
|
||||
|
||||
- References (2-3):
|
||||
- Portfolio/Projects:
|
||||
- GitHub/LinkedIn:
|
||||
|
||||
## Certification Path
|
||||
|
||||
- Have you reviewed the Fleet Operator Incentives document? [ ] Yes [ ] No
|
||||
- Are you willing to complete the 100-hour training program? [ ] Yes [ ] No
|
||||
|
||||
---
|
||||
|
||||
**Application Process:**
|
||||
1. Submit this form
|
||||
2. Technical screening (phone)
|
||||
3. Background check
|
||||
4. Training enrollment
|
||||
5. Certification exam
|
||||
6. Probation period (30 days)
|
||||
@@ -1,69 +0,0 @@
|
||||
---
|
||||
# Fleet Partner Report
|
||||
reporting_period:
|
||||
partner_name:
|
||||
partner_tier:
|
||||
---
|
||||
|
||||
## Executive Summary
|
||||
|
||||
- Period:
|
||||
- Total referred operators this period:
|
||||
- Active operators from referrals:
|
||||
- Revenue generated from referrals:
|
||||
- Status: [ ] On Track [ ] At Risk [ ] Exceeding Target
|
||||
|
||||
## Referral Activity
|
||||
|
||||
| Referral Name | Application Date | Status | Revenue Impact |
|---------------|------------------|--------|----------------|
|               |                  |        |                |
|               |                  |        |                |
|
||||
|
||||
**Total referrals:**
|
||||
**Converted to active operators:**
|
||||
**Conversion rate:**
|
||||
|
||||
## Financial Summary
|
||||
|
||||
- Referral fees earned this period:
|
||||
- Cumulative referral fees:
|
||||
- Revenue share percentage:
|
||||
- Projected next period revenue:
|
||||
|
||||
## Partner Performance Metrics
|
||||
|
||||
| Metric               | Target        | Actual | Variance |
|----------------------|---------------|--------|----------|
| Referrals/quarter    |               |        |          |
| Conversion rate      | >50%          |        |          |
| Revenue contribution | >30% of leads |        |          |
| Partner NPS          | >50           |        |          |
|
||||
|
||||
## Challenges & Blockers
|
||||
|
||||
*(Describe any issues affecting partner performance)*
|
||||
|
||||
## Support Needed
|
||||
|
||||
*(List any resources or support needed from Timmy to improve performance)*
|
||||
|
||||
## Goals for Next Period
|
||||
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
|
||||
## Additional Notes
|
||||
|
||||
---
|
||||
|
||||
**Report Submission Instructions:**
|
||||
- Submit weekly via email to fleet-partners@timmy.io
|
||||
- Copy your Partner Success Manager
|
||||
- Attach any supporting documentation
|
||||
|
||||
**Review Process:**
|
||||
- Weekly review: Partner Success Team
|
||||
- Monthly review: Fleet Leadership
|
||||
- Quarterly review: Executive Team
|
||||
Reference in New Issue
Block a user