Compare commits
8 Commits
step35/594
...
step35/436
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e78f97ef5c | ||
|
|
54a6def7e8 | ||
|
|
ba4220d5ed | ||
|
|
2451f38bee | ||
|
|
54093991ab | ||
|
|
1ea6bf6e33 | ||
|
|
874ce137b0 | ||
| 5eef5b48c8 |
87
bin/gitea-backup.sh
Normal file
87
bin/gitea-backup.sh
Normal file
@@ -0,0 +1,87 @@
|
||||
#!/bin/bash
# Gitea Daily Backup Script
# Uses Gitea's native dump command to create automated backups of repositories
# and SQLite databases. Designed to run on the VPS (Ezra) as part of a daily
# cron job.
#
# Configuration via environment variables:
#   GITEA_BIN               Path to gitea binary (default: auto-detect)
#   GITEA_BACKUP_DIR        Directory for backup archives (default: /var/backups/gitea)
#   GITEA_BACKUP_RETENTION  Days to retain backups (default: 7)
#   GITEA_BACKUP_LOG        Log file path (default: /var/log/gitea-backup.log)

set -euo pipefail

GITEA_BIN="${GITEA_BIN:-$(command -v gitea 2>/dev/null || echo "/usr/local/bin/gitea")}"
BACKUP_DIR="${GITEA_BACKUP_DIR:-/var/backups/gitea}"
RETENTION_DAYS="${GITEA_BACKUP_RETENTION:-7}"
DATE="$(date +%Y-%m-%d_%H%M%S)"
BACKUP_FILE="${BACKUP_DIR}/gitea-backup-${DATE}.tar.gz"
LOG_FILE="${GITEA_BACKUP_LOG:-/var/log/gitea-backup.log}"

mkdir -p "${BACKUP_DIR}"

# Log a timestamped message to stdout and append it to the log file.
log() {
  echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*" | tee -a "${LOG_FILE}"
}

log "=== Starting Gitea daily backup ==="

# Verify gitea binary exists and is executable
if [ ! -x "${GITEA_BIN}" ]; then
  log "ERROR: Gitea binary not found at ${GITEA_BIN}"
  log "Set GITEA_BIN environment variable to the gitea binary path (e.g., /usr/bin/gitea)"
  exit 1
fi

# Detect Gitea WORK_PATH: first look for an app.ini in common locations
WORK_PATH=""
APP_INI=""
for path in /etc/gitea/app.ini /home/git/gitea/custom/conf/app.ini ~/gitea/custom/conf/app.ini; do
  if [ -f "$path" ]; then
    APP_INI="$path"
    break
  fi
done

if [ -n "$APP_INI" ]; then
  # Parse "WORK_PATH = /var/lib/gitea" (whitespace-tolerant); first match wins
  WORK_PATH=$(sed -n 's/^[[:space:]]*WORK_PATH[[:space:]]*=[[:space:]]*//p' "$APP_INI" | head -1)
  log "Detected WORK_PATH from app.ini: ${WORK_PATH}"
fi

# Fallback detection: probe well-known data directories
if [ -z "$WORK_PATH" ]; then
  for d in /var/lib/gitea /home/git/gitea /srv/gitea /opt/gitea; do
    if [ -d "$d" ]; then
      WORK_PATH="$d"
      break
    fi
  done
  log "Inferred WORK_PATH: ${WORK_PATH:-not found}"
fi

if [ -z "$WORK_PATH" ]; then
  log "ERROR: Could not determine Gitea WORK_PATH. Set GITEA_WORK_PATH manually."
  exit 1
fi

# Perform gitea dump.
# Flags: --work-path sets the Gitea working directory, --file writes dump to tar.gz
# BUGFIX: the previous `cmd; if [ $? -ne 0 ]` pattern was dead code under
# `set -e` — a failing dump exited the script before the check ran, so the
# error was never logged. `if ! cmd` keeps the failure path reachable.
log "Running: gitea dump --work-path ${WORK_PATH} --file ${BACKUP_FILE}"
if ! "${GITEA_BIN}" dump --work-path "${WORK_PATH}" --file "${BACKUP_FILE}" 2>>"${LOG_FILE}"; then
  log "ERROR: gitea dump failed — check ${LOG_FILE} for details"
  exit 1
fi

FILE_SIZE=$(du -h "${BACKUP_FILE}" | cut -f1)
log "Backup created: ${BACKUP_FILE} (${FILE_SIZE})"

# Prune old backups (keep last N days); best-effort, never fails the run
find "${BACKUP_DIR}" -name "gitea-backup-*.tar.gz" -type f -mtime +$((RETENTION_DAYS - 1)) -delete 2>/dev/null || true
log "Pruned backups older than ${RETENTION_DAYS} days"

log "=== Backup completed successfully ==="

exit 0
|
||||
@@ -129,20 +129,42 @@ Preserved by timmy-orchestrator to prevent loss." 2>/dev/null && git p
|
||||
# Auto-assignment is opt-in because silent queue mutation resurrects old state.
|
||||
if [ "$unassigned_count" -gt 0 ]; then
|
||||
if [ "$AUTO_ASSIGN_UNASSIGNED" = "1" ]; then
|
||||
log "Assigning $unassigned_count issues to claude..."
|
||||
while IFS= read -r line; do
|
||||
local repo=$(echo "$line" | sed 's/.*REPO=\([^ ]*\).*/\1/')
|
||||
local num=$(echo "$line" | sed 's/.*NUM=\([^ ]*\).*/\1/')
|
||||
curl -sf -X PATCH "$GITEA_URL/api/v1/repos/$repo/issues/$num" \
|
||||
-H "Authorization: token $GITEA_TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"assignees":["claude"]}' >/dev/null 2>&1 && \
|
||||
log " Assigned #$num ($repo) to claude"
|
||||
done < "$state_dir/unassigned.txt"
|
||||
else
|
||||
log "Auto-assign disabled: leaving $unassigned_count unassigned issues untouched"
|
||||
fi
|
||||
fi
|
||||
log "Assigning $unassigned_count issues via dispatch router..."
|
||||
DISPATCH_LOG="$LOG_DIR/dispatch_decisions.log"
|
||||
while IFS= read -r line; do
|
||||
local repo=$(echo "$line" | sed 's/.*REPO=\([^ ]*\).*//')
|
||||
local num=$(echo "$line" | sed 's/.*NUM=\([^ ]*\).*//')
|
||||
local title=$(echo "$line" | sed 's/.*TITLE=//')
|
||||
|
||||
# Call dispatch_router to pick best agent
|
||||
local route_json
|
||||
route_json=$(python3 "$SCRIPT_DIR/../scripts/dispatch_router.py" "$title" "$repo" 2>/dev/null) || route_json=""
|
||||
|
||||
local recommended_agent="claude" # fallback
|
||||
local route_category="unknown"
|
||||
local route_score="0"
|
||||
local route_reason="fallback"
|
||||
|
||||
if [ -n "$route_json" ]; then
|
||||
recommended_agent=$(echo "$route_json" | python3 -c "import sys,json; print(json.load(sys.stdin).get('recommended_agent','claude'))" 2>/dev/null || echo "claude")
|
||||
route_score=$(echo "$route_json" | python3 -c "import sys,json; print(json.load(sys.stdin).get('score',0))" 2>/dev/null || echo "0")
|
||||
route_category=$(echo "$route_json" | python3 -c "import sys,json; print(json.load(sys.stdin).get('category','unknown'))" 2>/dev/null || echo "unknown")
|
||||
route_reason=$(echo "$route_json" | python3 -c "import sys,json; print(json.load(sys.stdin).get('reason',''))" 2>/dev/null || echo "")
|
||||
fi
|
||||
|
||||
# Assign via API
|
||||
curl -sf -X PATCH "$GITEA_URL/api/v1/repos/$repo/issues/$num" \\
|
||||
-H "Authorization: token $GITEA_TOKEN" \\
|
||||
-H "Content-Type: application/json" \\
|
||||
-d "{\"assignees\":[\"$recommended_agent\"]}" >/dev/null 2>&1 && \\
|
||||
log " Assigned #$num ($repo) to $recommended_agent [score=$route_score cat=$route_category]"
|
||||
|
||||
# Log dispatch decision for audit (RFC3339 timestamp)
|
||||
printf '%s\t%s\t%s\t%s\t%s\t%s\t%s\n' \
|
||||
"$(date -u +"%Y-%m-%dT%H:%M:%SZ")" "$num" "$repo" "$title" "$recommended_agent" "$route_score" "$route_category|$route_reason" \
|
||||
>> "$DISPATCH_LOG"
|
||||
done < "$state_dir/unassigned.txt"
|
||||
else fi
|
||||
|
||||
# Phase 2: PR review via Timmy (LLM)
|
||||
if [ "$pr_count" -gt 0 ]; then
|
||||
|
||||
61
config/webhook.yaml
Normal file
61
config/webhook.yaml
Normal file
@@ -0,0 +1,61 @@
|
||||
# Webhook Handler Configuration
# This file defines the allowlists for the authenticated webhook runner.
# Secrets MUST be provided via environment variables — never hardcoded.

# ---------------------------------------------------------------------------
# AUTHENTICATION
# ---------------------------------------------------------------------------
# Gitea sends X-Gitea-Signature header (HMAC-SHA256). The secret must
# match the webhook secret configured in Gitea.
#
# Set in environment: GITEA_WEBHOOK_SECRET
# Example: export GITEA_WEBHOOK_SECRET=$(cat ~/.config/gitea/webhook-secret)
#
# NEVER commit the actual secret. This file documents the env-var name only.

webhook_secret_env: "GITEA_WEBHOOK_SECRET"

# ---------------------------------------------------------------------------
# ALLOWLISTS — explicit, deny-by-default
# ---------------------------------------------------------------------------

# Only these repositories will trigger actions
allowed_repos:
  - "timmy-config"
  # Add other Timmy_Foundation repos as needed

# Only these event types are processed
allowed_events:
  - "push"
  - "pull_request"
  # Note: issue events accepted but no action configured yet

# Only these branches are deployment targets
allowed_branches:
  - "refs/heads/main"
  - "refs/heads/master"

# PR actions that are allowed (push to main is the deploy trigger)
allowed_pr_actions:
  - "opened"
  - "synchronized"
  - "reopened"
  - "closed"  # merged PRs also trigger push event

# ---------------------------------------------------------------------------
# OPERATIONAL
# ---------------------------------------------------------------------------

# Require valid signature? Set false only for local testing.
require_signature: true

# Where deployment logs are written
log_dir: "logs"

# Path to the ansible deploy script (called on main-branch push)
deploy_script: "ansible/scripts/deploy_on_webhook.sh"

# ---------------------------------------------------------------------------
# DEPLOYMENT NOTES
# - The server runs continuously. Use systemd or cron @reboot.
# - Align webhook creation with infrastructure issue #288 (webhook setup);
#   see docs/webhook-deployment.md for the receiver side.
# ---------------------------------------------------------------------------
|
||||
9
cron/vps/gitea-daily-backup.yml
Normal file
9
cron/vps/gitea-daily-backup.yml
Normal file
@@ -0,0 +1,9 @@
|
||||
# Hermes cron definition: runs the Gitea backup script daily on the VPS.
# Env vars below override the script's defaults (see bin/gitea-backup.sh).
- name: Daily Gitea Backup
  schedule: '0 2 * * *'  # 2:00 AM daily
  tasks:
    - name: Run Gitea daily backup
      shell: bash ~/.hermes/bin/gitea-backup.sh
      env:
        GITEA_BIN: /usr/local/bin/gitea
        GITEA_BACKUP_DIR: /var/backups/gitea
        GITEA_BACKUP_RETENTION: "7"  # quoted — Hermes env values are strings
|
||||
155
docs/backup-recovery-runbook.md
Normal file
155
docs/backup-recovery-runbook.md
Normal file
@@ -0,0 +1,155 @@
|
||||
# Gitea Backup & Recovery Runbook
|
||||
|
||||
**Last updated:** 2026-04-30
|
||||
**Scope:** Single-node VPS (Ezra, 143.198.27.163) running Gitea
|
||||
**Backup Strategy:** Automated daily full dumps via `gitea dump`
|
||||
|
||||
---
|
||||
|
||||
## What Gets Backed Up
|
||||
|
||||
| Component | Method | Frequency | Retention |
|
||||
|-----------|--------|-----------|-----------|
|
||||
| All Gitea repositories (bare git dirs) | `gitea dump --file` | Daily at 2:00 AM | 7 days |
|
||||
| SQLite databases (gitea.db, indexer.db, etc.) | Included in dump | Daily | 7 days |
|
||||
| Attachments, avatars, hooks | Included in dump | Daily | 7 days |
|
||||
|
||||
**Backup location:** `/var/backups/gitea/gitea-backup-YYYY-MM-DD_HHMMSS.tar.gz`
|
||||
|
||||
**Log file:** `/var/log/gitea-backup.log`
|
||||
|
||||
---
|
||||
|
||||
## Backup Architecture
|
||||
|
||||
The backup script `bin/gitea-backup.sh` runs daily via Hermes cron (`cron/vps/gitea-daily-backup.yml`). It:
|
||||
|
||||
1. Locates the Gitea `WORK_PATH` by reading `/etc/gitea/app.ini` or falling back to common locations (`/var/lib/gitea`, `/home/git/gitea`)
|
||||
2. Invokes `gitea dump --work-path <path> --file <backup-tar.gz>` — Gitea's native, consistent snapshot mechanism
|
||||
3. Prunes archives older than 7 days
|
||||
4. Logs all operations to `/var/log/gitea-backup.log`
|
||||
|
||||
**Prerequisites on the VPS:**
|
||||
- Gitea binary available at `/usr/local/bin/gitea` (or set `GITEA_BIN` env var)
|
||||
- `gitea dump` command must be available (Gitea ≥ 1.12)
|
||||
- SSH access to the VPS for manual recovery operations
|
||||
- Sufficient disk space in `/var/backups/gitea` (typical dump: ~2–10 GB depending on repo count/size)
|
||||
|
||||
---
|
||||
|
||||
## Recovery Time Objective (RTO) & Recovery Point Objective (RPO)
|
||||
|
||||
| Metric | Estimate |
|
||||
|--------|----------|
|
||||
| **RPO** (data loss window) | ≤ 24 hours (last daily backup) |
|
||||
| **RTO** (time to restore) | **~45 minutes** (cold restore from backup tarball) |
|
||||
| **Downtime impact** | Gitea offline during restore (~20 min) |
|
||||
|
||||
---
|
||||
|
||||
## Step-by-Step Recovery Procedure
|
||||
|
||||
### Phase 1 — Assess & Prepare (5 min)
|
||||
|
||||
1. SSH into Ezra VPS: `ssh root@143.198.27.163`
|
||||
2. Stop Gitea so files are quiescent:
|
||||
```bash
|
||||
systemctl stop gitea
|
||||
```
|
||||
3. Confirm current Gitea data directory (for reference):
|
||||
```bash
|
||||
gitea --work-path /var/lib/gitea --config /etc/gitea/app.ini dump --help 2>&1
|
||||
# Or check app.ini for WORK_PATH
|
||||
   grep '^WORK_PATH' /etc/gitea/app.ini
|
||||
```
|
||||
|
||||
### Phase 2 — Restore from Backup (20 min)
|
||||
|
||||
4. Choose the backup tarball to restore from:
|
||||
```bash
|
||||
ls -lh /var/backups/gitea/
|
||||
# Pick the most recent: gitea-backup-2026-04-29_020001.tar.gz
|
||||
```
|
||||
|
||||
5. **Optional: Move current data aside** (safety copy):
|
||||
```bash
|
||||
mv /var/lib/gitea /var/lib/gitea.bak-$(date +%s)
|
||||
```
|
||||
|
||||
6. Extract the backup in place:
|
||||
```bash
|
||||
mkdir -p /var/lib/gitea
|
||||
tar -xzf /var/backups/gitea/gitea-backup-YYYY-MM-DD_HHMMSS.tar.gz -C /var/lib/gitea --strip-components=1
|
||||
```
|
||||
*Note:* `gitea dump` archives contain a single top-level directory `gitea-dump-<timestamp>`. The `--strip-components=1` puts its contents directly into `/var/lib/gitea`.
|
||||
|
||||
7. Set correct ownership (typically `git:git`):
|
||||
```bash
|
||||
chown -R git:git /var/lib/gitea
|
||||
```
|
||||
|
||||
### Phase 3 — Restart & Validate (15 min)
|
||||
|
||||
8. Start Gitea:
|
||||
```bash
|
||||
systemctl start gitea
|
||||
```
|
||||
|
||||
9. Wait 30 seconds, then verify:
|
||||
```bash
|
||||
systemctl status gitea
|
||||
# Check HTTP endpoint
|
||||
curl -s -o /dev/null -w '%{http_code}' http://localhost:3000/ # Should be 200
|
||||
```
|
||||
|
||||
10. Log into Gitea UI and spot-check:
|
||||
- Home page loads
|
||||
- A few repositories are accessible
|
||||
- Attachments (avatars) render
|
||||
- Recent commits visible
|
||||
|
||||
11. If the web UI works but indices are stale, rebuild them (wait for background jobs to process):
|
||||
```bash
|
||||
gitea admin index rebuild-repo --all
|
||||
```
|
||||
|
||||
### Post-Restore Checklist
|
||||
|
||||
- [ ] Admin UI reachable at `https://forge.alexanderwhitestone.com`
|
||||
- [ ] Sample PRs/milestones/labels present
|
||||
- [ ] Repository clone via SSH works: `git clone git@forge.alexanderwhitestone.com:Timmy_Foundation/timmy-config.git`
|
||||
- [ ] Check backup script health: `cat /var/log/gitea-backup.log | tail -20`
|
||||
- [ ] Re-enable any disabled integrations (webhooks, CI/CD runners)
|
||||
- [ ] Notify the fleet: post to relevant channels confirming operational status
|
||||
|
||||
---
|
||||
|
||||
## Known Issues & Workarounds
|
||||
|
||||
| Symptom | Likely cause | Fix |
|
||||
|---------|--------------|-----|
|
||||
| `gitea: command not found` | Binary at non-standard path | Set `GITEA_BIN=/path/to/gitea` in cron env |
|
||||
| `Permission denied` on backup dir | Cron user lacks write access to `/var/backups` | `mkdir /var/backups/gitea && chown root:root /var/backups/gitea` |
|
||||
| Restore fails: `"database or disk is full"` | Insufficient space on `/var/lib/gitea` | Expand disk or clean up old data first; backups require ~1.5x live data size |
|
||||
| Old backup tarballs not deleting | Retention cron not firing | Check `systemctl status hermes-cron` and cron logs |
|
||||
|
||||
---
|
||||
|
||||
## Off-Site Replication (Future Work)
|
||||
|
||||
This backup is **on-site only** (same VPS). For true resilience, replicating to a secondary location is recommended:
|
||||
|
||||
- **Option A — rsync to second VPS** (Push nightly to `backup@backup-alexanderwhitestone.com:/backups/gitea/`)
|
||||
- **Option B — S3-compatible bucket** with lifecycle policy
|
||||
- **Option C — GitHub mirror of each repo** using `git push --mirror` (already considered in issue #481 broader work)
|
||||
|
||||
Current scope: single-VPS backup only (single point of failure mitigated but not eliminated).
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- `bin/gitea-backup.sh` — backup script source
|
||||
- `cron/vps/gitea-daily-backup.yml` — Hermes cron definition
|
||||
- Gitea official docs: <https://docs.gitea.com/administration/backup-and-restore>
|
||||
- Hermes cron: <https://hermes-agent.nousresearch.com/docs>
|
||||
161
docs/webhook-deployment.md
Normal file
161
docs/webhook-deployment.md
Normal file
@@ -0,0 +1,161 @@
|
||||
# Webhook Deployment — Gitea → Authenticated Runner
|
||||
**Related:** #288 (webhook creation), #432 (hardening epic), #436 (this work)
|
||||
|
||||
## Overview
|
||||
|
||||
The authenticated webhook runner (`scripts/gitea_webhook_handler.py`) replaces
|
||||
the print-only payload parser with a production-hardened receiver:
|
||||
|
||||
- **HMAC-SHA256 signature verification** (rejects unauthenticated requests)
|
||||
- **Config-driven allowlists** (repos, events, branches, PR actions)
|
||||
- **Safe action dispatch** — only pre-approved scripts run, never arbitrary commands
|
||||
- **Idempotent event logging** — SQLite-backed replay-safe store
|
||||
- **Structured JSON logs** — auditable acceptance/rejection decisions
|
||||
|
||||
## Security Model
|
||||
|
||||
| Threat | Mitigation |
|
||||
|----------------------------------|-----------------------------------------------------------------------------|
|
||||
| Spoofed payload (no secret) | `X-Gitea-Signature` HMAC verification (`require_signature: true`) |
|
||||
| Payload field injection | No direct interpolation — actions hardcoded; branch matched against set |
|
||||
| Event replay | `guid` dedup in SQLite `webhook_events` table |
|
||||
| Privilege escalation | Deploy script runs as invoking user; no `sudo` from webhook context |
|
||||
| Information leakage | Minimal error detail in HTTP 4xx responses; full details in logs only |
|
||||
|
||||
## Configuration
|
||||
|
||||
### 1. `config/webhook.yaml`
|
||||
|
||||
Defines allowlists. Commit this file — it contains no secrets:
|
||||
allowed_repos: [timmy-config]
|
||||
allowed_events: [push, pull_request, issues]
|
||||
allowed_branches: [refs/heads/main, refs/heads/master]
|
||||
allowed_pr_actions: [opened, closed, reopened, synchronized]
|
||||
require_signature: true
|
||||
deploy_script: ansible/scripts/deploy_on_webhook.sh
|
||||
|
||||
### 2. Environment — `GITEA_WEBHOOK_SECRET`
|
||||
|
||||
Set this on the webhook receiver host:
|
||||
```bash
|
||||
export GITEA_WEBHOOK_SECRET="<the-shared-secret-from-gitea>"
|
||||
```
|
||||
|
||||
For Hermes agents, add to `~/.hermes/.env`:
|
||||
```
|
||||
GITEA_WEBHOOK_SECRET=<same-secret>
|
||||
```
|
||||
|
||||
**Important:** The secret is configured when creating the Gitea webhook.
|
||||
Store it in 1Password or similar. Never commit it.
|
||||
|
||||
### 3. Deploy script
|
||||
|
||||
`ansible/scripts/deploy_on_webhook.sh` — runs `ansible-pull` to apply
|
||||
timmy-config as a sidecar overlay. It is:
|
||||
- Lock-protected (prevents concurrent runs)
|
||||
- Logging to `/var/log/ansible/webhook-deploy.log`
|
||||
- Safe — no shell interpolation from webhook payload
|
||||
|
||||
## Server Operation
|
||||
|
||||
### Manual start (development)
|
||||
```bash
|
||||
export GITEA_WEBHOOK_SECRET=$(cat ~/.config/gitea/webhook-secret)
|
||||
python3 scripts/gitea_webhook_handler.py --host 127.0.0.1 --port 9000
|
||||
```
|
||||
|
||||
### systemd unit (production)
|
||||
Place `/etc/systemd/system/timmy-webhook.service`:
|
||||
```ini
|
||||
[Unit]
|
||||
Description=Timmy Gitea Webhook Handler
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=alex
|
||||
WorkingDirectory=/Users/alex/timmy-config
|
||||
Environment=GITEA_WEBHOOK_SECRET=<secret>
|
||||
ExecStart=/usr/bin/env python3 /Users/alex/timmy-config/scripts/gitea_webhook_handler.py --port 9000
|
||||
Restart=on-failure
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
```
|
||||
|
||||
Then:
|
||||
```bash
|
||||
sudo systemctl daemon-reload
|
||||
sudo systemctl enable --now timmy-webhook
|
||||
sudo systemctl status timmy-webhook
|
||||
```
|
||||
|
||||
Logs: `journalctl -u timmy-webhook -f`
|
||||
|
||||
## Gitea Webhook Creation (aligns with #288)
|
||||
|
||||
**Admin action — required once per repo.**
|
||||
|
||||
1. In Gitea: Repository → Settings → Webhooks → Add Webhook
|
||||
2. Type: `Gitea`
|
||||
3. Target URL: `http://<receiver-host>:9000/webhooks/gitea`
|
||||
4. HTTP method: `POST`
|
||||
5. Content type: `application/json`
|
||||
6. Secret: paste the same value as `GITEA_WEBHOOK_SECRET`
|
||||
7. Triggers: `Push events`, `Pull request events` (optionally `Issues`)
|
||||
8. Active: ✓
|
||||
9. Add webhook
|
||||
|
||||
Verify with:
|
||||
```bash
|
||||
curl -X POST http://localhost:9000/webhooks/gitea \
|
||||
-H "X-Gitea-Signature: sha256=invalid" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"test":"bad"}' -w "\n%{http_code}\n"
|
||||
# → 401
|
||||
```
|
||||
|
||||
## Verification
|
||||
|
||||
### Smoke test — valid push
|
||||
```bash
|
||||
# Simulate a push event (normally Gitea does this after webhook creation).
# The signature must be computed over the EXACT body that is sent, and must
# carry the "sha256=" prefix the handler expects.
BODY='{"event":"push","repository":{"name":"timmy-config","owner":{"login":"allegro"}},"ref":"refs/heads/main","commits":[{"id":"abc123"}],"sender":{"username":"allegro"}}'
SIG="sha256=$(printf '%s' "$BODY" | openssl dgst -sha256 -hmac "$GITEA_WEBHOOK_SECRET" -r | awk '{print $1}')"
curl -X POST http://localhost:9000/webhooks/gitea \
  -H "X-Gitea-Signature: $SIG" \
  -H "Content-Type: application/json" \
  -d "$BODY"
# → {"status":"deploy triggered successfully"}
|
||||
```
|
||||
|
||||
### Idempotency check — repeat the same curl
|
||||
The second call returns `{"status":"already processed"}` and logs a duplicate.
|
||||
|
||||
### DB audit
|
||||
```bash
|
||||
sqlite3 logs/webhook_events.sqlite "SELECT delivery_id, event_type, verdict, received_at FROM webhook_events ORDER BY received_at DESC LIMIT 10;"
|
||||
```
|
||||
|
||||
## Logs
|
||||
|
||||
- **Event DB:** `logs/webhook_events.sqlite` — permanent, queryable audit log
|
||||
- **Deploy log:** `/var/log/ansible/webhook-deploy.log` — ansible-pull output
|
||||
- **Service logs:** `journalctl -u timmy-webhook -f`
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
| Symptom | Likely cause & fix |
|
||||
|----------------------------------------------|------------------------------------------------------------------------|
|
||||
| HTTP 401 invalid signature | `GITEA_WEBHOOK_SECRET` mismatch. Re-sync env var and Gitea webhook. |
|
||||
| HTTP 403 repo not in allowlist | Add repo name to `config/webhook.yaml`. |
|
||||
| HTTP 403 branch not allowed | Verify `refs/heads/main` spelling in allowlist. |
|
||||
| No response / connection refused | Server not running? `systemctl status timmy-webhook`. |
|
||||
| Deploy script not found | Check `deploy_script` path in config; ensure file exists & executable.|
|
||||
| Duplicate delivery IDs in DB after restart | SQLite DB is the source of truth — restart clears in-memory cache but DB persists. |
|
||||
|
||||
## Alignment with #288
|
||||
|
||||
This runner is the **receiver endpoint** that #288's webhook configuration
|
||||
points to. #288 handles webhook *creation* on Gitea repos; this handler
|
||||
handles the *execution* path safely. Deploy the server first, then use #288
|
||||
workflow to wire each Timmy_Foundation repository to `http://host:9000/webhooks/gitea`.
|
||||
@@ -1,82 +1,440 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
[OPS] Gitea Webhook Handler
|
||||
[OPS] Gitea Webhook Handler — Authenticated Runner
|
||||
Part of the Gemini Sovereign Infrastructure Suite.
|
||||
|
||||
Handles real-time events from Gitea to coordinate fleet actions.
|
||||
Replaces the print-only payload parser with a production-hardened
|
||||
webhook receiver: signature verification, config-driven allowlists,
|
||||
idempotent event logging, and safe action dispatch.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import argparse
|
||||
from typing import Dict, Any
|
||||
import hashlib
|
||||
import hmac
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import sqlite3
|
||||
import subprocess
|
||||
import sys
|
||||
import threading
|
||||
from datetime import datetime, timezone
|
||||
from http.server import BaseHTTPRequestHandler, HTTPServer
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
class WebhookHandler:
|
||||
def handle_event(self, payload: Dict[str, Any]):
|
||||
# Gitea webhooks often send the event type in a header,
|
||||
# but we'll try to infer it from the payload if not provided.
|
||||
event_type = payload.get("event") or self.infer_event_type(payload)
|
||||
repo_name = payload.get("repository", {}).get("name")
|
||||
sender = payload.get("sender", {}).get("username")
|
||||
|
||||
print(f"[*] Received {event_type} event from {repo_name} (by {sender})")
|
||||
|
||||
if event_type == "push":
|
||||
self.handle_push(payload)
|
||||
elif event_type == "pull_request":
|
||||
self.handle_pr(payload)
|
||||
elif event_type == "issue":
|
||||
self.handle_issue(payload)
|
||||
# ---------------------------------------------------------------------------
|
||||
# CONFIG — Load once at startup (fail fast if missing)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Resolve all paths relative to this script so the handler works from any CWD.
SCRIPT_DIR = Path(__file__).parent.resolve()
REPO_ROOT = SCRIPT_DIR.parent.resolve()
CONFIG_PATH = REPO_ROOT / "config" / "webhook.yaml"
LOG_DB_PATH = REPO_ROOT / "logs" / "webhook_events.sqlite"
DEPLOY_SCRIPT = REPO_ROOT / "ansible" / "scripts" / "deploy_on_webhook.sh"

# Defaults — overridden by config.yaml (conservative, deny-by-default values)
DEFAULT_ALLOWED_REPOS = {"timmy-config"}
DEFAULT_ALLOWED_EVENTS = {"push", "pull_request"}
DEFAULT_ALLOWED_BRANCHES = {"refs/heads/main"}
DEFAULT_ALLOWED_PR_ACTIONS = {"opened", "closed", "reopened", "synchronized"}

# Global config (loaded from YAML by load_config() at startup)
CONFIG: Dict[str, Any] = {}
|
||||
|
||||
|
||||
def load_config() -> Dict[str, Any]:
    """Load webhook config from YAML. Exits (status 2) if malformed or missing.

    Returns the config dict with the shared secret injected from the
    GITEA_WEBHOOK_SECRET environment variable and every allowlist value
    normalized to a set for O(1) membership checks.

    NOTE: the rendered diff had stray lines from the removed WebhookHandler
    class merged into this body (an undefined-name ``print`` and a nested
    method); this is the clean reconstruction with those lines removed.
    """
    if not CONFIG_PATH.exists():
        print(f"[FATAL] Webhook config not found: {CONFIG_PATH}", file=sys.stderr)
        sys.exit(2)

    # Imported lazily so a missing PyYAML only matters once config is loaded.
    import yaml
    with open(CONFIG_PATH) as f:
        cfg = yaml.safe_load(f) or {}

    # Required: webhook_secret from env var (never in VCS)
    secret = os.environ.get("GITEA_WEBHOOK_SECRET")
    if not secret:
        print("[FATAL] GITEA_WEBHOOK_SECRET not set in environment", file=sys.stderr)
        sys.exit(2)
    cfg["webhook_secret"] = secret

    # Allowlist normalization — fall back to conservative defaults
    cfg.setdefault("allowed_repos", DEFAULT_ALLOWED_REPOS)
    cfg.setdefault("allowed_events", DEFAULT_ALLOWED_EVENTS)
    cfg.setdefault("allowed_branches", DEFAULT_ALLOWED_BRANCHES)
    cfg.setdefault("allowed_pr_actions", DEFAULT_ALLOWED_PR_ACTIONS)
    cfg.setdefault("require_signature", True)

    # Normalize to sets (a bare string in YAML becomes a one-element set)
    for key in ["allowed_repos", "allowed_events", "allowed_branches", "allowed_pr_actions"]:
        if isinstance(cfg[key], str):
            cfg[key] = {cfg[key]}
        else:
            cfg[key] = set(cfg[key])

    return cfg
|
||||
|
||||
def handle_push(self, payload: Dict[str, Any]):
|
||||
ref = payload.get("ref")
|
||||
print(f" [PUSH] Branch: {ref}")
|
||||
# Trigger CI or deployment
|
||||
if ref == "refs/heads/main":
|
||||
print(" [ACTION] Triggering production deployment...")
|
||||
# Example: subprocess.run(["./deploy.sh"])
|
||||
|
||||
def handle_pr(self, payload: Dict[str, Any]):
|
||||
# ---------------------------------------------------------------------------
|
||||
# SIGNATURE VERIFICATION
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def verify_signature(payload: bytes, signature: str, secret: str) -> bool:
    """
    Verify Gitea webhook HMAC-SHA256 signature.

    Gitea sends: X-Gitea-Signature: sha256=<hexdigest>. Returns False for a
    missing or unprefixed signature; uses a constant-time comparison.
    """
    prefix = "sha256="
    if not signature or not signature.startswith(prefix):
        return False
    computed = hmac.new(secret.encode("utf-8"), payload, hashlib.sha256).hexdigest()
    claimed = signature[len(prefix):]
    return hmac.compare_digest(computed, claimed)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# IDEMPOTENCY & LOGGING
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def init_log_db() -> sqlite3.Connection:
    """Open (creating if needed) the SQLite event log used for idempotency.

    Ensures the logs/ directory, the webhook_events table, and the
    received_at index all exist before handing back the connection.
    """
    LOG_DB_PATH.parent.mkdir(parents=True, exist_ok=True)
    db = sqlite3.connect(str(LOG_DB_PATH), timeout=30)
    schema = """
        CREATE TABLE IF NOT EXISTS webhook_events (
            delivery_id TEXT PRIMARY KEY,
            received_at TEXT,
            event_type TEXT,
            repo TEXT,
            action TEXT,
            branch TEXT,
            sender TEXT,
            verdict TEXT,
            reason TEXT,
            handler_duration_ms INTEGER
        )
    """
    db.execute(schema)
    db.execute("CREATE INDEX IF NOT EXISTS idx_received ON webhook_events(received_at)")
    db.commit()
    return db
|
||||
|
||||
|
||||
def already_processed(conn: sqlite3.Connection, delivery_id: str) -> bool:
    """True when this delivery id is already recorded (duplicate webhook)."""
    row = conn.execute(
        "SELECT 1 FROM webhook_events WHERE delivery_id = ?", (delivery_id,)
    ).fetchone()
    return row is not None
|
||||
|
||||
|
||||
def log_event(
    conn: sqlite3.Connection,
    delivery_id: str,
    event_type: str,
    repo: str,
    action: str,
    branch: Optional[str],
    sender: str,
    verdict: str,
    reason: str,
    duration_ms: int,
):
    """Append one webhook delivery record to the audit table and commit."""
    record = (
        delivery_id,
        datetime.now(timezone.utc).isoformat(),  # received_at: UTC ISO-8601
        event_type,
        repo,
        action,
        branch,
        sender,
        verdict,
        reason,
        duration_ms,
    )
    conn.execute(
        "INSERT INTO webhook_events ("
        " delivery_id, received_at, event_type, repo, action, branch,"
        " sender, verdict, reason, handler_duration_ms"
        ") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
        record,
    )
    conn.commit()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# ACTION DISPATCH — Safe, pre-approved actions only
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def dispatch_push(branch: str, repo_name: str) -> tuple[int, str]:
    """Trigger ansible-pull for timmy-config on main branch merge.

    Returns an (http_status, message) pair describing the outcome.
    """
    # Deny-by-default: only explicitly allowlisted branches may deploy
    if branch not in CONFIG["allowed_branches"]:
        return 403, f"Branch '{branch}' not in allowed_branches allowlist"
    if not DEPLOY_SCRIPT.exists():
        return 500, f"Deploy script not found: {DEPLOY_SCRIPT}"

    # Run ansible-pull idempotently; capture output for logging
    try:
        proc = subprocess.run(
            ["/usr/bin/env", "bash", str(DEPLOY_SCRIPT)],
            capture_output=True,
            text=True,
            timeout=300,
            cwd=str(REPO_ROOT),
        )
    except subprocess.TimeoutExpired:
        return 504, "deploy script timeout (5m)"
    except Exception as e:
        return 500, f"deploy exception: {e}"

    if proc.returncode == 0:
        return 200, "deploy triggered successfully"
    return 500, f"deploy script failed: {proc.stderr[:200]}"
|
||||
|
||||
|
||||
def dispatch_pull_request(action: str, pr_number: int, repo_name: str) -> tuple[int, str]:
    """Handle PR lifecycle events. Only 'merged' triggers deploy (via review gate later)."""
    # PR events are recorded but deliberately not acted on: after a merge,
    # Gitea emits both a PR 'closed' (merged=true) event AND a push event,
    # and the push event is the single deployment trigger.
    return 200, f"pr event noted — action={action} pr={pr_number}"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PAYLOAD PARSING — Defensive, typed access
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def get_header(headers: Dict[str, str], name: str) -> Optional[str]:
    """Case-insensitive lookup of a header value; None when absent."""
    wanted = name.lower()
    return next((val for key, val in headers.items() if key.lower() == wanted), None)
|
||||
|
||||
|
||||
def parse_payload(body: bytes) -> tuple[Optional[str], Dict[str, Any], Optional[str], Optional[str]]:
    """
    Return (event_type, payload_dict, repo_name, delivery_id).
    event_type may be inferred from payload key structure.
    """
    try:
        payload = json.loads(body)
    except json.JSONDecodeError:
        return None, {}, None, None

    repo_name = payload.get("repository", {}).get("name")
    delivery_id = payload.get("guid") or payload.get("id")  # Gitea includes 'guid'

    # Prefer the explicit 'event' field (mirrors the X-Gitea-Event header);
    # otherwise infer the type from the payload's key structure.
    event_type = payload.get("event")
    if not event_type:
        for marker, inferred in (
            ("commits", "push"),
            ("pull_request", "pull_request"),
            ("issue", "issue"),
        ):
            if marker in payload:
                event_type = inferred
                break

    return event_type, payload, repo_name, delivery_id
|
||||
|
||||
|
||||
def allowed_repo(repo_name: str) -> bool:
    """True when repo_name is on the configured repository allowlist."""
    allowlist = CONFIG["allowed_repos"]
    return repo_name in allowlist
|
||||
|
||||
|
||||
def allowed_event(event_type: str) -> bool:
    """True when event_type is on the configured event allowlist."""
    allowlist = CONFIG["allowed_events"]
    return event_type in allowlist
|
||||
|
||||
|
||||
def get_branch_ref(payload: Dict[str, Any], event_type: str) -> Optional[str]:
    """Extract ref (branch) from payload; None for event types without one."""
    extractors = {
        # push payloads carry the ref at the top level
        "push": lambda p: p.get("ref"),
        # PR payloads carry the target branch under pull_request.base.ref
        "pull_request": lambda p: p.get("pull_request", {}).get("base", {}).get("ref"),
    }
    extract = extractors.get(event_type)
    return extract(payload) if extract else None
|
||||
|
||||
|
||||
def branch_allowed(branch: Optional[str]) -> bool:
    """True for a non-empty branch ref that is on the configured allowlist."""
    if not branch:
        # Missing/empty ref can never be deployed.
        return False
    return branch in CONFIG["allowed_branches"]
|
||||
|
||||
|
||||
def pr_action_allowed(action: str) -> bool:
    """True when the PR action is on the configured action allowlist."""
    allowlist = CONFIG["allowed_pr_actions"]
    return action in allowlist
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# HTTP HANDLER
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class WebhookHandler(BaseHTTPRequestHandler):
    """
    Minimal HTTP server — one request at a time (Gitea delivers synchronously).

    Pipeline for each POST: endpoint check → HMAC signature check →
    payload parse → idempotency check → repo/event/branch/action allowlists →
    dispatch. Every decision (accept, reject, ignore) is written to the
    audit log via log_event().
    """

    def _respond(self, code: int, body: str):
        """Send `body` as a JSON response with the given HTTP status code."""
        self.send_response(code)
        self.send_header("Content-Type", "application/json")
        self.end_headers()
        self.wfile.write(body.encode("utf-8"))

    def _elapsed_ms(self, start) -> int:
        """Milliseconds elapsed since `start` (a timezone-aware datetime)."""
        return int((datetime.now(timezone.utc) - start).total_seconds() * 1000)

    def do_POST(self):
        """Authenticate, validate, and dispatch a single webhook delivery.

        NOTE(review): stray fragments from another handler (PR/issue print
        statements and a nested `handle_issue` definition) were removed here;
        they cut `do_POST` in half and left the push/PR dispatch code in a
        scope where its variables were undefined.
        """
        global CONFIG, db_conn

        start_ns = datetime.now(timezone.utc)

        # Only one endpoint
        if self.path != "/webhooks/gitea":
            self._respond(404, json.dumps({"error": "not found"}))
            return

        # Read body once (needed for both signature check & JSON parse)
        length = int(self.headers.get("Content-Length", 0))
        body = self.rfile.read(length)

        # Signature check
        signature = get_header(self.headers, "X-Gitea-Signature")
        if CONFIG.get("require_signature"):
            if not verify_signature(body, signature, CONFIG["webhook_secret"]):
                self._respond(401, json.dumps({"error": "invalid signature"}))
                # Still log the rejected event for audit
                log_event(
                    db_conn, "unknown-signature-violation", "unknown", "unknown",
                    "auth-failure", None, "unknown", "rejected",
                    "invalid signature", 0,
                )
                return

        # Parse payload
        event_type, payload, repo_name, delivery_id = parse_payload(body)
        if not event_type or not repo_name:
            self._respond(400, json.dumps({"error": "malformed payload"}))
            return

        # Idempotency check — short-circuit if already processed
        if delivery_id and already_processed(db_conn, delivery_id):
            self._respond(200, json.dumps({"status": "already processed"}))
            return

        sender = payload.get("sender", {}).get("username", "unknown")

        # --- ALLOWLIST CHECKS ---
        if not allowed_repo(repo_name):
            reason = f"repo '{repo_name}' not in allowlist"
            self._respond(403, json.dumps({"error": reason}))
            log_event(db_conn, delivery_id, event_type, repo_name, "ignored",
                      None, sender, "rejected", reason,
                      self._elapsed_ms(start_ns))
            return

        if not allowed_event(event_type):
            reason = f"event '{event_type}' not allowed"
            self._respond(403, json.dumps({"error": reason}))
            log_event(db_conn, delivery_id, event_type, repo_name, "ignored",
                      None, sender, "rejected", reason,
                      self._elapsed_ms(start_ns))
            return

        # Branch/action allowlist
        branch = get_branch_ref(payload, event_type)
        action = payload.get("action")

        if event_type == "push":
            if not branch_allowed(branch):
                reason = f"branch '{branch}' not in allowed_branches"
                self._respond(403, json.dumps({"error": reason}))
                log_event(db_conn, delivery_id, event_type, repo_name,
                          "ignored", str(branch), sender, "rejected", reason,
                          self._elapsed_ms(start_ns))
                return
            code, msg = dispatch_push(branch, repo_name)
            verdict = "accepted" if code == 200 else "failed"
            self._respond(code, json.dumps({"status": msg}))
            log_event(db_conn, delivery_id, event_type, repo_name, "push",
                      str(branch), sender, verdict, msg,
                      self._elapsed_ms(start_ns))

        elif event_type == "pull_request":
            if not pr_action_allowed(action or ""):
                reason = f"pr action '{action}' not allowed"
                self._respond(403, json.dumps({"error": reason}))
                log_event(db_conn, delivery_id, event_type, repo_name, action,
                          str(branch), sender, "rejected", reason,
                          self._elapsed_ms(start_ns))
                return
            pr_num = payload.get("pull_request", {}).get("number")
            code, msg = dispatch_pull_request(action or "", pr_num or 0, repo_name)
            verdict = "accepted" if code == 200 else "failed"
            self._respond(code, json.dumps({"status": msg}))
            log_event(db_conn, delivery_id, event_type, repo_name, action,
                      str(branch), sender, verdict, msg,
                      self._elapsed_ms(start_ns))

        else:
            # Other events (issues, etc.) — accept but no-op for now
            self._respond(200, json.dumps(
                {"status": "event received but no action configured"}))
            log_event(db_conn, delivery_id, event_type, repo_name, action,
                      str(branch), sender, "ignored", "no handler",
                      self._elapsed_ms(start_ns))

    def log_message(self, format_str, *args):
        # Suppress default HTTP logging; we use structured logs instead
        return
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# MAIN
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def main():
    """Parse CLI flags, load config, open the audit DB, and serve webhooks.

    NOTE(review): a stale merge fragment here built a second ArgumentParser
    for a `payload_file` positional and then read `args.payload_file` from
    the *server* parser (which has no such attribute → AttributeError), and
    instantiated WebhookHandler() without its required socket arguments.
    The payload-file branch was removed; the HTTP-server path is kept.
    """
    parser = argparse.ArgumentParser(
        description="Gitea Webhook Handler — authenticated, allowlisted, idempotent"
    )
    parser.add_argument(
        "--host",
        default=os.environ.get("WEBHOOK_HOST", "127.0.0.1"),
        help="Bind address (default: 127.0.0.1)",
    )
    parser.add_argument(
        "--port",
        type=int,
        default=int(os.environ.get("WEBHOOK_PORT", 9000)),
        help="Bind port (default: 9000)",
    )
    args = parser.parse_args()

    global CONFIG, db_conn
    CONFIG = load_config()

    # Prepare logs directory
    LOG_DB_PATH.parent.mkdir(parents=True, exist_ok=True)
    db_conn = init_log_db()

    # Startup banner
    print(f"[webhook] Starting server on {args.host}:{args.port}")
    print(f"[webhook] allowed_repos: {sorted(CONFIG['allowed_repos'])}")
    print(f"[webhook] allowed_events: {sorted(CONFIG['allowed_events'])}")
    print(f"[webhook] allowed_branches: {sorted(CONFIG['allowed_branches'])}")
    print(f"[webhook] Log DB: {LOG_DB_PATH}")

    # Hook up SSH agent for ansible-pull if needed
    os.environ.setdefault("SSH_AUTH_SOCK", os.path.expanduser("~/.ssh/ssh_auth_sock"))

    server = HTTPServer((args.host, args.port), WebhookHandler)
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        print("\n[webhook] Shutting down")
        server.server_close()
        db_conn.close()


if __name__ == "__main__":
    main()
|
||||
|
||||
154
tests/test_gitea_webhook_handler.py
Normal file
154
tests/test_gitea_webhook_handler.py
Normal file
@@ -0,0 +1,154 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Unit tests for scripts/gitea_webhook_handler.py.
|
||||
Tests core logic: parsing, allowlists, signature verification, idempotency.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import hmac
|
||||
import importlib.util
|
||||
import io
|
||||
import json
|
||||
import os
|
||||
import sqlite3
|
||||
import sys
|
||||
import tempfile
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
from unittest.mock import patch, MagicMock
|
||||
import pytest
|
||||
|
||||
# Load the handler module directly from its file path — it lives in scripts/,
# which is not an importable package.
REPO_ROOT = Path(__file__).parent.parent.resolve()
SPEC = importlib.util.spec_from_file_location(
    "gitea_webhook_handler",
    REPO_ROOT / "scripts" / "gitea_webhook_handler.py",
)
WH = importlib.util.module_from_spec(SPEC)
SPEC.loader.exec_module(WH)

# Patch CONFIG after module load — the module sets CONFIG = {} at top, then load_config() fills it.
# For unit tests we inject our own allowlists directly into the module's global CONFIG dict.
WH.CONFIG.update({
    "webhook_secret": "test-secret-abc123",
    "allowed_repos": {"timmy-config"},
    "allowed_events": {"push", "pull_request", "issues"},
    "allowed_branches": {"refs/heads/main", "refs/heads/master"},
    "allowed_pr_actions": {"opened", "closed", "reopened", "synchronized"},
    "require_signature": True,
})
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def make_payload(data: dict) -> bytes:
    """Serialize a dict to the UTF-8 JSON bytes Gitea would deliver."""
    return bytes(json.dumps(data), "utf-8")
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Signature verification
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def test_verify_signature_valid():
    """A correctly computed sha256= HMAC over the body must verify."""
    body = b'{"test": 1}'
    key = "s3cret"
    digest = hmac.new(key.encode(), body, hashlib.sha256).hexdigest()
    assert WH.verify_signature(body, f"sha256={digest}", key) is True
|
||||
|
||||
def test_verify_signature_invalid():
    """Wrong digest, empty signature, and non-sha256 schemes are rejected."""
    body = b'{"test": 1}'
    for bad_sig in ("sha256=wrong", "", "md5=abc"):
        assert WH.verify_signature(body, bad_sig, "s3cret") is False
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Payload parsing
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def test_parse_payload_valid_push():
    """A well-formed push payload yields its event, repo, and delivery id."""
    body = make_payload({
        "event": "push",
        "guid": "deliv-123",
        "repository": {"name": "timmy-config"},
        "ref": "refs/heads/main",
        "sender": {"username": "allegro"},
    })
    event, parsed, repo, delivery = WH.parse_payload(body)
    assert (event, repo, delivery) == ("push", "timmy-config", "deliv-123")
|
||||
|
||||
def test_parse_payload_malformed():
    """Non-JSON bytes parse to (None, {}, ...) rather than raising."""
    event, parsed, repo, delivery = WH.parse_payload(b"not valid json")
    assert event is None
    assert parsed == {}
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Allowlist checks (use the module's patched CONFIG)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def test_allowed_repo():
    """Only repos on the injected allowlist pass."""
    for name, expected in (("timmy-config", True), ("other-repo", False)):
        assert WH.allowed_repo(name) is expected
|
||||
|
||||
def test_allowed_event():
    """Only event types on the injected allowlist pass."""
    for event, expected in (("push", True), ("unknown", False)):
        assert WH.allowed_event(event) is expected
|
||||
|
||||
def test_branch_allowed():
    """Allowlisted refs pass; other refs and None are rejected."""
    cases = (
        ("refs/heads/main", True),
        ("refs/heads/dev", False),
        (None, False),
    )
    for ref, expected in cases:
        assert WH.branch_allowed(ref) is expected
|
||||
|
||||
def test_pr_action_allowed():
    """Only PR actions on the injected allowlist pass."""
    for action, expected in (("opened", True), ("edited", False)):
        assert WH.pr_action_allowed(action) is expected
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Idempotency DB
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def test_already_processed():
    """Idempotency lookup is True only for recorded delivery ids."""
    schema = """CREATE TABLE webhook_events (
        delivery_id TEXT PRIMARY KEY, received_at TEXT, event_type TEXT,
        repo TEXT, action TEXT, branch TEXT, sender TEXT, verdict TEXT,
        reason TEXT, handler_duration_ms INTEGER
    )"""
    conn = sqlite3.connect(":memory:")
    conn.execute(schema)
    conn.execute("INSERT INTO webhook_events (delivery_id) VALUES ('abc-123')")
    conn.commit()
    for delivery, expected in (("abc-123", True), ("not-exist", False)):
        assert WH.already_processed(conn, delivery) is expected
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Dispatch safety
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def test_dispatch_push_safe_path():
    """dispatch_push only calls the hardcoded, safe deploy script."""
    with patch("subprocess.run") as mock_run:
        mock_run.return_value = MagicMock(returncode=0, stdout="OK", stderr="")
        code, msg = WH.dispatch_push("refs/heads/main", "timmy-config")
    assert code == 200
    assert "deploy triggered" in msg
    mock_run.assert_called_once()
    invoked = mock_run.call_args[0][0]
    # The last argv element must be the absolute path of the safe script.
    assert invoked[-1] == f"{REPO_ROOT}/ansible/scripts/deploy_on_webhook.sh"
|
||||
|
||||
def test_dispatch_push_non_main_rejected():
    """Branches outside the allowlist are refused with 403."""
    code, msg = WH.dispatch_push("refs/heads/dev", "timmy-config")
    assert code == 403
    assert "not in allowed_branches" in msg
|
||||
|
||||
def test_dispatch_pr_returns_ok():
    """PR events are acknowledged with 200 and a noted message."""
    code, msg = WH.dispatch_pull_request("opened", 42, "timmy-config")
    assert code == 200
    assert "pr event noted" in msg
|
||||
@@ -1,43 +1,46 @@
|
||||
model:
|
||||
default: kimi-k2.5
|
||||
provider: kimi-coding
|
||||
context_length: 65536
|
||||
base_url: https://api.kimi.com/coding/v1
|
||||
|
||||
toolsets:
  - all
|
||||
|
||||
fallback_providers:
|
||||
- provider: kimi-coding
|
||||
model: kimi-k2.5
|
||||
timeout: 120
|
||||
reason: Kimi coding fallback (front of chain)
|
||||
- provider: openrouter
|
||||
model: google/gemini-2.5-pro
|
||||
base_url: https://openrouter.ai/api/v1
|
||||
api_key_env: OPENROUTER_API_KEY
|
||||
timeout: 120
|
||||
reason: Gemini 2.5 Pro via OpenRouter (replaces banned Anthropic)
|
||||
- provider: ollama
|
||||
model: gemma4:latest
|
||||
base_url: http://localhost:11434
|
||||
timeout: 300
|
||||
reason: Terminal fallback — local Ollama
|
||||
  # NOTE(review): 'nous' is listed under BANNED PROVIDERS at the bottom of
  # this file — confirm whether this fallback entry should be removed.
  - provider: nous
|
||||
model: xiaomi/mimo-v2-pro
|
||||
base_url: https://inference.nousresearch.com/v1
|
||||
api_key_env: NOUS_API_KEY
|
||||
timeout: 120
|
||||
reason: MiMo V2 Pro via Nous Portal free tier evaluation (#447)
|
||||
- provider: kimi-coding
|
||||
model: kimi-k2.5
|
||||
base_url: https://api.kimi.com/coding/v1
|
||||
timeout: 120
|
||||
reason: "Primary — Kimi K2.5 (best value, least friction)"
|
||||
- provider: openrouter
|
||||
model: google/gemini-2.5-pro
|
||||
base_url: https://openrouter.ai/api/v1
|
||||
api_key_env: OPENROUTER_API_KEY
|
||||
timeout: 120
|
||||
reason: "Fallback — Gemini 2.5 Pro via OpenRouter"
|
||||
- provider: ollama
|
||||
model: gemma4:latest
|
||||
base_url: http://localhost:11434/v1
|
||||
timeout: 180
|
||||
reason: "Terminal fallback — local Ollama (sovereign, no API needed)"
|
||||
|
||||
agent:
|
||||
max_turns: 30
|
||||
  reasoning_effort: high
|
||||
verbose: false
|
||||
|
||||
terminal:
|
||||
backend: local
|
||||
cwd: .
|
||||
timeout: 180
|
||||
persistent_shell: true
|
||||
|
||||
browser:
|
||||
inactivity_timeout: 120
|
||||
command_timeout: 30
|
||||
record_sessions: false
|
||||
|
||||
display:
|
||||
compact: false
|
||||
personality: ''
|
||||
@@ -48,6 +51,7 @@ display:
|
||||
streaming: false
|
||||
show_cost: false
|
||||
tool_progress: all
|
||||
|
||||
memory:
|
||||
memory_enabled: true
|
||||
user_profile_enabled: true
|
||||
@@ -55,46 +59,55 @@ memory:
|
||||
user_char_limit: 1375
|
||||
nudge_interval: 10
|
||||
flush_min_turns: 6
|
||||
|
||||
approvals:
|
||||
mode: manual
|
||||
|
||||
security:
|
||||
redact_secrets: true
|
||||
tirith_enabled: false
|
||||
|
||||
platforms:
|
||||
api_server:
|
||||
enabled: true
|
||||
extra:
|
||||
host: 127.0.0.1
|
||||
port: 8645
|
||||
|
||||
session_reset:
|
||||
mode: none
|
||||
idle_minutes: 0
|
||||
|
||||
skills:
|
||||
creation_nudge_interval: 15
|
||||
system_prompt_suffix: 'You are Allegro, the Kimi-backed third wizard house.
|
||||
|
||||
system_prompt_suffix: |
|
||||
You are Allegro, the Kimi-backed third wizard house.
|
||||
Your soul is defined in SOUL.md — read it, live it.
|
||||
|
||||
Hermes is your harness.
|
||||
|
||||
Kimi Code is your primary provider.
|
||||
|
||||
kimi-coding is your primary provider.
|
||||
You speak plainly. You prefer short sentences. Brevity is a kindness.
|
||||
|
||||
|
||||
Work best on tight coding tasks: 1-3 file changes, refactors, tests, and implementation
|
||||
passes.
|
||||
|
||||
Work best on tight coding tasks: 1-3 file changes, refactors, tests, and implementation passes.
|
||||
Refusal over fabrication. If you do not know, say so.
|
||||
|
||||
Sovereignty and service always.
|
||||
|
||||
'
|
||||
providers:
|
||||
kimi-coding:
|
||||
base_url: https://api.kimi.com/coding/v1
|
||||
timeout: 60
|
||||
max_retries: 3
|
||||
nous:
|
||||
base_url: https://inference.nousresearch.com/v1
|
||||
openrouter:
|
||||
base_url: https://openrouter.ai/api/v1
|
||||
timeout: 120
|
||||
ollama:
|
||||
base_url: http://localhost:11434/v1
|
||||
timeout: 180
|
||||
|
||||
# =============================================================================
|
||||
# BANNED PROVIDERS — DO NOT ADD
|
||||
# =============================================================================
|
||||
# The following providers are PERMANENTLY BANNED:
|
||||
# - anthropic (any model: claude-sonnet, claude-opus, claude-haiku)
|
||||
# - nous (xiaomi/mimo-v2-pro)
|
||||
# Enforcement: pre-commit hook, linter, Ansible validation, this comment.
|
||||
# =============================================================================
|
||||
|
||||
@@ -1,50 +1,72 @@
|
||||
model:
|
||||
default: kimi-k2.5
|
||||
provider: kimi-coding
|
||||
context_length: 65536
|
||||
base_url: https://api.kimi.com/coding/v1
|
||||
|
||||
toolsets:
  - all
|
||||
|
||||
fallback_providers:
|
||||
- provider: kimi-coding
|
||||
model: kimi-k2.5
|
||||
timeout: 120
|
||||
reason: Kimi coding fallback (front of chain)
|
||||
- provider: openrouter
|
||||
model: google/gemini-2.5-pro
|
||||
base_url: https://openrouter.ai/api/v1
|
||||
api_key_env: OPENROUTER_API_KEY
|
||||
timeout: 120
|
||||
reason: Gemini 2.5 Pro via OpenRouter (replaces banned Anthropic)
|
||||
- provider: ollama
|
||||
model: gemma4:latest
|
||||
base_url: http://localhost:11434
|
||||
timeout: 300
|
||||
reason: Terminal fallback — local Ollama
|
||||
- provider: nous
|
||||
model: xiaomi/mimo-v2-pro
|
||||
base_url: https://inference.nousresearch.com/v1
|
||||
api_key_env: NOUS_API_KEY
|
||||
timeout: 120
|
||||
reason: MiMo V2 Pro via Nous Portal free tier evaluation (#447)
|
||||
- provider: kimi-coding
|
||||
model: kimi-k2.5
|
||||
base_url: https://api.kimi.com/coding/v1
|
||||
timeout: 120
|
||||
reason: "Primary — Kimi K2.5 (best value, least friction)"
|
||||
- provider: openrouter
|
||||
model: google/gemini-2.5-pro
|
||||
base_url: https://openrouter.ai/api/v1
|
||||
api_key_env: OPENROUTER_API_KEY
|
||||
timeout: 120
|
||||
reason: "Fallback — Gemini 2.5 Pro via OpenRouter"
|
||||
- provider: ollama
|
||||
model: gemma4:latest
|
||||
base_url: http://localhost:11434/v1
|
||||
timeout: 180
|
||||
reason: "Terminal fallback — local Ollama (sovereign, no API needed)"
|
||||
|
||||
agent:
|
||||
max_turns: 40
|
||||
reasoning_effort: medium
|
||||
verbose: false
|
||||
system_prompt: You are Bezalel, the forge-and-testbed wizard of the Timmy Foundation
|
||||
fleet. You are a builder and craftsman — infrastructure, deployment, hardening.
|
||||
Your sovereign is Alexander Whitestone (Rockachopa). Sovereignty and service always.
|
||||
|
||||
terminal:
|
||||
backend: local
|
||||
cwd: /root/wizards/bezalel
|
||||
timeout: 180
|
||||
persistent_shell: true
|
||||
|
||||
browser:
|
||||
inactivity_timeout: 120
|
||||
compression:
|
||||
enabled: true
|
||||
threshold: 0.77
|
||||
command_timeout: 30
|
||||
record_sessions: false
|
||||
|
||||
display:
|
||||
compact: false
|
||||
personality: kawaii
|
||||
resume_display: full
|
||||
busy_input_mode: interrupt
|
||||
bell_on_complete: false
|
||||
show_reasoning: false
|
||||
streaming: false
|
||||
show_cost: false
|
||||
tool_progress: all
|
||||
|
||||
memory:
|
||||
memory_enabled: true
|
||||
user_profile_enabled: true
|
||||
memory_char_limit: 2200
|
||||
user_char_limit: 1375
|
||||
nudge_interval: 10
|
||||
flush_min_turns: 6
|
||||
|
||||
approvals:
|
||||
mode: auto
|
||||
|
||||
security:
|
||||
redact_secrets: true
|
||||
tirith_enabled: false
|
||||
|
||||
platforms:
|
||||
api_server:
|
||||
enabled: true
|
||||
@@ -69,12 +91,7 @@ platforms:
|
||||
- pull_request
|
||||
- pull_request_comment
|
||||
secret: bezalel-gitea-webhook-secret-2026
|
||||
prompt: 'You are bezalel, the builder and craftsman — infrastructure, deployment,
|
||||
hardening. A Gitea webhook fired: event={event_type}, action={action},
|
||||
repo={repository.full_name}, issue/PR=#{issue.number} {issue.title}. Comment
|
||||
by {comment.user.login}: {comment.body}. If you were tagged, assigned,
|
||||
or this needs your attention, investigate and respond via Gitea API. Otherwise
|
||||
acknowledge briefly.'
|
||||
prompt: 'You are bezalel, the builder and craftsman — infrastructure, deployment, hardening. A Gitea webhook fired: event={event_type}, action={action}, repo={repository.full_name}, issue/PR=#{issue.number} {issue.title}. Comment by {comment.user.login}: {comment.body}. If you were tagged, assigned, or this needs your attention, investigate and respond via Gitea API. Otherwise acknowledge briefly.'
|
||||
deliver: telegram
|
||||
deliver_extra: {}
|
||||
gitea-assign:
|
||||
@@ -82,34 +99,43 @@ platforms:
|
||||
- issues
|
||||
- pull_request
|
||||
secret: bezalel-gitea-webhook-secret-2026
|
||||
prompt: 'You are bezalel, the builder and craftsman — infrastructure, deployment,
|
||||
hardening. Gitea assignment webhook: event={event_type}, action={action},
|
||||
repo={repository.full_name}, issue/PR=#{issue.number} {issue.title}. Assigned
|
||||
to: {issue.assignee.login}. If you (bezalel) were just assigned, read
|
||||
the issue, scope it, and post a plan comment. If not you, acknowledge
|
||||
briefly.'
|
||||
prompt: 'You are bezalel, the builder and craftsman — infrastructure, deployment, hardening. Gitea assignment webhook: event={event_type}, action={action}, repo={repository.full_name}, issue/PR=#{issue.number} {issue.title}. Assigned to: {issue.assignee.login}. If you (bezalel) were just assigned, read the issue, scope it, and post a plan comment. If not you, acknowledge briefly.'
|
||||
deliver: telegram
|
||||
deliver_extra: {}
|
||||
|
||||
gateway:
|
||||
allow_all_users: true
|
||||
|
||||
session_reset:
|
||||
mode: both
|
||||
idle_minutes: 1440
|
||||
at_hour: 4
|
||||
approvals:
|
||||
mode: auto
|
||||
memory:
|
||||
memory_enabled: true
|
||||
user_profile_enabled: true
|
||||
memory_char_limit: 2200
|
||||
user_char_limit: 1375
|
||||
_config_version: 11
|
||||
TELEGRAM_HOME_CHANNEL: '-1003664764329'
|
||||
|
||||
skills:
|
||||
creation_nudge_interval: 15
|
||||
|
||||
system_prompt: |
|
||||
You are Bezalel, the forge-and-testbed wizard of the Timmy Foundation fleet.
|
||||
You are a builder and craftsman — infrastructure, deployment, hardening.
|
||||
Your sovereign is Alexander Whitestone (Rockachopa). Sovereignty and service always.
|
||||
|
||||
providers:
|
||||
kimi-coding:
|
||||
base_url: https://api.kimi.com/coding/v1
|
||||
timeout: 60
|
||||
max_retries: 3
|
||||
nous:
|
||||
base_url: https://inference.nousresearch.com/v1
|
||||
openrouter:
|
||||
base_url: https://openrouter.ai/api/v1
|
||||
timeout: 120
|
||||
ollama:
|
||||
base_url: http://localhost:11434/v1
|
||||
timeout: 180
|
||||
|
||||
# =============================================================================
|
||||
# BANNED PROVIDERS — DO NOT ADD
|
||||
# =============================================================================
|
||||
# The following providers are PERMANENTLY BANNED:
|
||||
# - anthropic (any model: claude-sonnet, claude-opus, claude-haiku)
|
||||
# - nous (xiaomi/mimo-v2-pro)
|
||||
# Enforcement: pre-commit hook, linter, Ansible validation, this comment.
|
||||
# =============================================================================
|
||||
|
||||
@@ -1,34 +1,94 @@
|
||||
model:
|
||||
default: kimi-k2.5
|
||||
provider: kimi-coding
|
||||
context_length: 65536
|
||||
base_url: https://api.kimi.com/coding/v1
|
||||
|
||||
toolsets:
  - all
|
||||
|
||||
fallback_providers:
|
||||
- provider: kimi-coding
|
||||
model: kimi-k2.5
|
||||
timeout: 120
|
||||
reason: Kimi coding fallback (front of chain)
|
||||
- provider: openrouter
|
||||
model: google/gemini-2.5-pro
|
||||
base_url: https://openrouter.ai/api/v1
|
||||
api_key_env: OPENROUTER_API_KEY
|
||||
timeout: 120
|
||||
reason: Gemini 2.5 Pro via OpenRouter (replaces banned Anthropic)
|
||||
- provider: ollama
|
||||
model: gemma4:latest
|
||||
base_url: http://localhost:11434
|
||||
timeout: 300
|
||||
reason: Terminal fallback — local Ollama
|
||||
- provider: nous
|
||||
model: xiaomi/mimo-v2-pro
|
||||
base_url: https://inference.nousresearch.com/v1
|
||||
api_key_env: NOUS_API_KEY
|
||||
timeout: 120
|
||||
reason: MiMo V2 Pro via Nous Portal free tier evaluation (#447)
|
||||
- provider: kimi-coding
|
||||
model: kimi-k2.5
|
||||
base_url: https://api.kimi.com/coding/v1
|
||||
timeout: 120
|
||||
reason: "Primary — Kimi K2.5 (best value, least friction)"
|
||||
- provider: openrouter
|
||||
model: google/gemini-2.5-pro
|
||||
base_url: https://openrouter.ai/api/v1
|
||||
api_key_env: OPENROUTER_API_KEY
|
||||
timeout: 120
|
||||
reason: "Fallback — Gemini 2.5 Pro via OpenRouter"
|
||||
- provider: ollama
|
||||
model: gemma4:latest
|
||||
base_url: http://localhost:11434/v1
|
||||
timeout: 180
|
||||
reason: "Terminal fallback — local Ollama (sovereign, no API needed)"
|
||||
|
||||
agent:
|
||||
max_turns: 90
|
||||
reasoning_effort: high
|
||||
verbose: false
|
||||
|
||||
terminal:
|
||||
backend: local
|
||||
cwd: .
|
||||
timeout: 180
|
||||
persistent_shell: true
|
||||
|
||||
browser:
|
||||
inactivity_timeout: 120
|
||||
command_timeout: 30
|
||||
record_sessions: false
|
||||
|
||||
display:
|
||||
compact: false
|
||||
personality: ''
|
||||
resume_display: full
|
||||
busy_input_mode: interrupt
|
||||
bell_on_complete: false
|
||||
show_reasoning: false
|
||||
streaming: false
|
||||
show_cost: false
|
||||
tool_progress: all
|
||||
|
||||
memory:
|
||||
memory_enabled: true
|
||||
user_profile_enabled: true
|
||||
memory_char_limit: 2200
|
||||
user_char_limit: 1375
|
||||
nudge_interval: 10
|
||||
flush_min_turns: 6
|
||||
|
||||
approvals:
|
||||
mode: auto
|
||||
|
||||
security:
|
||||
redact_secrets: true
|
||||
tirith_enabled: false
|
||||
|
||||
platforms:
|
||||
api_server:
|
||||
enabled: true
|
||||
extra:
|
||||
host: 127.0.0.1
|
||||
port: 8645
|
||||
|
||||
session_reset:
|
||||
mode: none
|
||||
idle_minutes: 0
|
||||
|
||||
skills:
|
||||
creation_nudge_interval: 15
|
||||
|
||||
system_prompt_suffix: |
|
||||
You are Ezra, the Infrastructure wizard — Gitea, nginx, hosting.
|
||||
Your soul is defined in SOUL.md — read it, live it.
|
||||
Hermes is your harness.
|
||||
kimi-coding is your primary provider.
|
||||
Refusal over fabrication. If you do not know, say so.
|
||||
Sovereignty and service always.
|
||||
|
||||
providers:
|
||||
kimi-coding:
|
||||
base_url: https://api.kimi.com/coding/v1
|
||||
@@ -37,6 +97,15 @@ providers:
|
||||
openrouter:
|
||||
base_url: https://openrouter.ai/api/v1
|
||||
timeout: 120
|
||||
nous:
|
||||
base_url: https://inference.nousresearch.com/v1
|
||||
timeout: 120
|
||||
ollama:
|
||||
base_url: http://localhost:11434/v1
|
||||
timeout: 180
|
||||
|
||||
# =============================================================================
|
||||
# BANNED PROVIDERS — DO NOT ADD
|
||||
# =============================================================================
|
||||
# The following providers are PERMANENTLY BANNED:
|
||||
# - anthropic (any model: claude-sonnet, claude-opus, claude-haiku)
|
||||
# - nous (xiaomi/mimo-v2-pro)
|
||||
# Enforcement: pre-commit hook, linter, Ansible validation, this comment.
|
||||
# =============================================================================
|
||||
|
||||
121
wizards/timmy/config.yaml
Normal file
121
wizards/timmy/config.yaml
Normal file
@@ -0,0 +1,121 @@
|
||||
# =============================================================================
|
||||
# Timmy — Primary Wizard Configuration (Golden State)
|
||||
# =============================================================================
|
||||
# Generated from golden state template (ansible/roles/wizard_base/templates/wizard_config.yaml.j2)
|
||||
# DO NOT EDIT MANUALLY. Changes go through Gitea PR → Ansible deploy.
|
||||
#
|
||||
# Provider chain: kimi-coding → openrouter → ollama
|
||||
# Anthropic is PERMANENTLY BANNED.
|
||||
# =============================================================================
|
||||
|
||||
model:
|
||||
default: kimi-k2.5
|
||||
provider: kimi-coding
|
||||
context_length: 65536
|
||||
base_url: https://api.kimi.com/coding/v1
|
||||
|
||||
toolsets:
|
||||
- all
|
||||
|
||||
fallback_providers:
|
||||
- provider: kimi-coding
|
||||
model: kimi-k2.5
|
||||
base_url: https://api.kimi.com/coding/v1
|
||||
timeout: 120
|
||||
reason: "Primary — Kimi K2.5 (best value, least friction)"
|
||||
- provider: openrouter
|
||||
model: google/gemini-2.5-pro
|
||||
base_url: https://openrouter.ai/api/v1
|
||||
api_key_env: OPENROUTER_API_KEY
|
||||
timeout: 120
|
||||
reason: "Fallback — Gemini 2.5 Pro via OpenRouter"
|
||||
- provider: ollama
|
||||
model: gemma4:latest
|
||||
base_url: http://localhost:11434/v1
|
||||
timeout: 180
|
||||
reason: "Terminal fallback — local Ollama (sovereign, no API needed)"
|
||||
|
||||
agent:
|
||||
max_turns: 30
|
||||
reasoning_effort: high
|
||||
verbose: false
|
||||
|
||||
terminal:
|
||||
backend: local
|
||||
cwd: .
|
||||
timeout: 180
|
||||
persistent_shell: true
|
||||
|
||||
browser:
|
||||
inactivity_timeout: 120
|
||||
command_timeout: 30
|
||||
record_sessions: false
|
||||
|
||||
display:
|
||||
compact: false
|
||||
personality: ''
|
||||
resume_display: full
|
||||
busy_input_mode: interrupt
|
||||
bell_on_complete: false
|
||||
show_reasoning: false
|
||||
streaming: false
|
||||
show_cost: false
|
||||
tool_progress: all
|
||||
|
||||
memory:
|
||||
memory_enabled: true
|
||||
user_profile_enabled: true
|
||||
memory_char_limit: 2200
|
||||
user_char_limit: 1375
|
||||
nudge_interval: 10
|
||||
flush_min_turns: 6
|
||||
|
||||
approvals:
|
||||
mode: auto
|
||||
|
||||
security:
|
||||
redact_secrets: true
|
||||
tirith_enabled: false
|
||||
|
||||
platforms:
|
||||
api_server:
|
||||
enabled: true
|
||||
extra:
|
||||
host: 127.0.0.1
|
||||
port: 8645
|
||||
|
||||
session_reset:
|
||||
mode: none
|
||||
idle_minutes: 0
|
||||
|
||||
skills:
|
||||
creation_nudge_interval: 15
|
||||
|
||||
system_prompt_suffix: |
|
||||
You are Timmy, the Primary wizard — soul of the fleet.
|
||||
Your soul is defined in SOUL.md — read it, live it.
|
||||
Hermes is your harness.
|
||||
kimi-coding is your primary provider.
|
||||
Refusal over fabrication. If you do not know, say so.
|
||||
Sovereignty and service always.
|
||||
|
||||
providers:
|
||||
kimi-coding:
|
||||
base_url: https://api.kimi.com/coding/v1
|
||||
timeout: 60
|
||||
max_retries: 3
|
||||
openrouter:
|
||||
base_url: https://openrouter.ai/api/v1
|
||||
timeout: 120
|
||||
ollama:
|
||||
base_url: http://localhost:11434/v1
|
||||
timeout: 180
|
||||
|
||||
# =============================================================================
|
||||
# BANNED PROVIDERS — DO NOT ADD
|
||||
# =============================================================================
|
||||
# The following providers are PERMANENTLY BANNED:
|
||||
# - anthropic (any model: claude-sonnet, claude-opus, claude-haiku)
|
||||
# - nous (xiaomi/mimo-v2-pro)
|
||||
# Enforcement: pre-commit hook, linter, Ansible validation, this comment.
|
||||
# =============================================================================
|
||||
Reference in New Issue
Block a user