Compare commits
4 Commits
data/scene...fix/issue-

| SHA1 |
|---|
| b0f21bfbc9 |
| ad751a6de6 |
| 130fa40f0c |
| 82f9810081 |

cron/pipeline-scheduler.yml (new file, 9 lines)
@@ -0,0 +1,9 @@
- name: Nightly Pipeline Scheduler
  schedule: '*/30 18-23,0-8 * * *'  # Every 30 min, off-peak hours only
  tasks:
    - name: Check and start pipelines
      shell: "bash scripts/nightly-pipeline-scheduler.sh"
      env:
        PIPELINE_TOKEN_LIMIT: "500000"
        PIPELINE_PEAK_START: "9"
        PIPELINE_PEAK_END: "18"

scripts/nightly-pipeline-scheduler.md (new file, 50 lines)
@@ -0,0 +1,50 @@
# Nightly Pipeline Scheduler

Auto-starts batch pipelines when inference is available.

## What It Does

1. Checks inference provider health (OpenRouter, Ollama, RunPod)
2. Checks whether it is off-peak hours (configurable, default: after 6 PM)
3. Checks interactive session load (don't fight with live users)
4. Checks the daily token budget (configurable limit)
5. Starts the highest-priority incomplete pipeline

## Pipeline Priority Order

| Priority | Pipeline | Deps | Max Tokens |
|----------|----------|------|------------|
| 1 | playground-factory | none | 100,000 |
| 2 | training-factory | none | 150,000 |
| 3 | knowledge-mine | training-factory running | 80,000 |
| 4 | adversary | knowledge-mine running | 50,000 |
| 5 | codebase-genome | none | 120,000 |

## Usage

```bash
# Normal run (used by cron)
./scripts/nightly-pipeline-scheduler.sh

# Dry run (show what would start)
./scripts/nightly-pipeline-scheduler.sh --dry-run

# Status report
./scripts/nightly-pipeline-scheduler.sh --status

# Force start during peak hours
./scripts/nightly-pipeline-scheduler.sh --force
```

## Configuration

Set via environment variables (example below):
- `PIPELINE_TOKEN_LIMIT`: Daily token budget (default: 500,000)
- `PIPELINE_PEAK_START`: Peak hours start (default: 9)
- `PIPELINE_PEAK_END`: Peak hours end (default: 18)
- `HERMES_HOME`: Hermes home directory (default: `~/.hermes`)
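
For example, to preview a run with a smaller budget and a custom peak window (the values here are illustrative):

```bash
PIPELINE_TOKEN_LIMIT=250000 PIPELINE_PEAK_START=8 PIPELINE_PEAK_END=20 \
  ./scripts/nightly-pipeline-scheduler.sh --dry-run
```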

## Cron

Runs every 30 minutes. Off-peak only (unless `--force`).
See `cron/pipeline-scheduler.yml`.
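
If the YAML scheduler is not used, a plain crontab entry would be roughly equivalent (the checkout path below is an assumption; adjust it to your install):

```
*/30 18-23,0-8 * * * cd /path/to/repo && ./scripts/nightly-pipeline-scheduler.sh
```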

scripts/nightly-pipeline-scheduler.sh (new file, 383 lines)
@@ -0,0 +1,383 @@
#!/usr/bin/env bash
# nightly-pipeline-scheduler.sh — Auto-start batch pipelines when inference is available.
#
# Checks provider health, pipeline progress, token budget, and interactive load.
# Starts the highest-priority incomplete pipeline that can run.
#
# Usage:
#   ./scripts/nightly-pipeline-scheduler.sh            # Normal run
#   ./scripts/nightly-pipeline-scheduler.sh --dry-run  # Show what would start
#   ./scripts/nightly-pipeline-scheduler.sh --status   # Pipeline status report
#   ./scripts/nightly-pipeline-scheduler.sh --force    # Start even during peak hours

set -euo pipefail

# --- Configuration ---
HERMES_HOME="${HERMES_HOME:-$HOME/.hermes}"
BUDGET_FILE="${HERMES_HOME}/pipeline_budget.json"
STATE_FILE="${HERMES_HOME}/pipeline_state.json"
LOG_FILE="${HERMES_HOME}/logs/pipeline-scheduler.log"
TOKEN_DAILY_LIMIT="${PIPELINE_TOKEN_LIMIT:-500000}"
PEAK_HOURS_START="${PIPELINE_PEAK_START:-9}"
PEAK_HOURS_END="${PIPELINE_PEAK_END:-18}"

# Pipeline definitions (priority order).
# Each entry: name|script|max_tokens|dependency
PIPELINES=(
    "playground-factory|scripts/pipeline_playground_factory.sh|100000|none"
    "training-factory|scripts/pipeline_training_factory.sh|150000|none"
    "knowledge-mine|scripts/pipeline_knowledge_mine.sh|80000|training-factory"
    "adversary|scripts/pipeline_adversary.sh|50000|knowledge-mine"
    "codebase-genome|scripts/pipeline_codebase_genome.sh|120000|none"
)
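
# Illustrative: each entry splits into its four fields with IFS, e.g.
#   IFS='|' read -r name script max_tokens dep <<< "${PIPELINES[0]}"
#   # -> name=playground-factory, script=scripts/pipeline_playground_factory.sh,
#   #    max_tokens=100000, dep=none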

# --- Colors ---
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
CYAN='\033[0;36m'
NC='\033[0m'

# --- Helpers ---
now_hour() { date +%-H; }

is_peak_hours() {
    local h
    h=$(now_hour)
    [[ $h -ge $PEAK_HOURS_START && $h -lt $PEAK_HOURS_END ]]
}

ensure_dirs() {
    mkdir -p "$(dirname "$LOG_FILE")" "$(dirname "$BUDGET_FILE")" "$(dirname "$STATE_FILE")"
}

log() { echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*" | tee -a "$LOG_FILE"; }

get_budget_used_today() {
    if [[ -f "$BUDGET_FILE" ]]; then
        local today
        today=$(date +%Y-%m-%d)
        python3 -c "
import json
with open('$BUDGET_FILE') as f:
    d = json.load(f)
print(d.get('daily', {}).get('$today', {}).get('tokens_used', 0))
" 2>/dev/null || echo 0
    else
        echo 0
    fi
}

get_budget_remaining() {
    local used
    used=$(get_budget_used_today)
    echo $((TOKEN_DAILY_LIMIT - used))
}

update_budget() {
    local pipeline="$1"
    local tokens="$2"
    local today
    today=$(date +%Y-%m-%d)
    python3 -c "
import json, os
path = '$BUDGET_FILE'
d = {}
if os.path.exists(path):
    with open(path) as f:
        d = json.load(f)
daily = d.setdefault('daily', {})
day = daily.setdefault('$today', {'tokens_used': 0, 'pipelines': {}})
day['tokens_used'] = day.get('tokens_used', 0) + $tokens
day['pipelines']['$pipeline'] = day['pipelines'].get('$pipeline', 0) + $tokens
with open(path, 'w') as f:
    json.dump(d, f, indent=2)
"
}
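
# pipeline_budget.json is expected to look roughly like this (illustrative):
# {
#   "daily": {
#     "2025-06-01": {
#       "tokens_used": 120000,
#       "pipelines": { "training-factory": 120000 }
#     }
#   }
# }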

get_pipeline_state() {
    if [[ -f "$STATE_FILE" ]]; then
        cat "$STATE_FILE"
    else
        echo "{}"
    fi
}

set_pipeline_state() {
    local pipeline="$1"
    local state="$2"  # running, complete, failed, skipped
    python3 -c "
import json, os
path = '$STATE_FILE'
d = {}
if os.path.exists(path):
    with open(path) as f:
        d = json.load(f)
d['$pipeline'] = {'state': '$state', 'updated': '$(date -Iseconds)'}
with open(path, 'w') as f:
    json.dump(d, f, indent=2)
"
}
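
# pipeline_state.json is expected to look roughly like this (illustrative):
# {
#   "training-factory": { "state": "running", "updated": "2025-06-01T22:00:00+00:00", "pid": 12345 }
# }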

pipeline_state_of() {
    # Print the recorded state for a pipeline (not_started if unknown).
    local pipeline="$1"
    python3 -c "
import json, os
path = '$STATE_FILE'
if not os.path.exists(path):
    print('not_started')
else:
    with open(path) as f:
        d = json.load(f)
    print(d.get('$pipeline', {}).get('state', 'not_started'))
" 2>/dev/null || echo "not_started"
}

is_pipeline_complete() {
    [[ "$(pipeline_state_of "$1")" == "complete" ]] && echo true || echo false
}

is_pipeline_running() {
    [[ "$(pipeline_state_of "$1")" == "running" ]] && echo true || echo false
}

check_dependency() {
    local dep="$1"
    if [[ "$dep" == "none" ]]; then
        return 0
    fi
    # knowledge-mine needs training-factory running or complete.
    # adversary needs knowledge-mine at least 50% done; simplified to
    # "running or complete" (we'd need progress tracking for 50%).
    local state
    state=$(pipeline_state_of "$dep")
    [[ "$state" == "running" || "$state" == "complete" ]]
}

check_inference_available() {
    # Check if any inference provider is responding.
    # 1. Check OpenRouter
    local or_ok
    or_ok=$(curl -s -o /dev/null -w "%{http_code}" \
        --connect-timeout 5 "https://openrouter.ai/api/v1/models" 2>/dev/null || echo "000")

    # 2. Check local Ollama
    local ollama_ok
    ollama_ok=$(curl -s -o /dev/null -w "%{http_code}" \
        --connect-timeout 5 "http://localhost:11434/api/tags" 2>/dev/null || echo "000")

    # 3. Check RunPod (if configured)
    local runpod_ok="000"
    if [[ -n "${RUNPOD_ENDPOINT:-}" ]]; then
        runpod_ok=$(curl -s -o /dev/null -w "%{http_code}" \
            --connect-timeout 5 "$RUNPOD_ENDPOINT/health" 2>/dev/null || echo "000")
    fi

    if [[ "$or_ok" == "200" || "$ollama_ok" == "200" || "$runpod_ok" == "200" ]]; then
        return 0
    fi
    return 1
}

check_interactive_load() {
    # Check if there are active interactive sessions (don't fight with live users).
    # Look for tmux panes with active hermes sessions.
    local active
    active=$(tmux list-panes -a -F '#{pane_pid} #{pane_current_command}' 2>/dev/null \
        | grep -c "hermes\|python3" || true)
    active=${active:-0}

    # If more than 3 interactive sessions, skip pipeline start.
    if [[ $active -gt 3 ]]; then
        return 1
    fi
    return 0
}

start_pipeline() {
    local name="$1"
    local script="$2"
    local max_tokens="$3"
    local budget_remaining="$4"
    local mode="${5:-run}"

    if [[ "$budget_remaining" -lt "$max_tokens" ]]; then
        log "SKIP $name: insufficient budget ($budget_remaining < $max_tokens tokens)"
        return 1
    fi

    if [[ ! -f "$script" ]]; then
        log "SKIP $name: script not found ($script)"
        return 1
    fi

    # Mode is passed straight through from the CLI, so accept the --dry-run flag as-is.
    if [[ "$mode" == "--dry-run" || "$mode" == "dry-run" ]]; then
        log "DRY-RUN: Would start $name (budget: $budget_remaining, needs: $max_tokens)"
        return 0
    fi

    log "START $name (budget: $budget_remaining, max_tokens: $max_tokens)"
    set_pipeline_state "$name" "running"

    # Run in background, capture output
    local log_path="${HERMES_HOME}/logs/pipeline-${name}.log"
    bash "$script" --max-tokens "$max_tokens" >> "$log_path" 2>&1 &
    local pid=$!

    # Wait a moment to check if it started OK
    sleep 2
    if kill -0 "$pid" 2>/dev/null; then
        log "RUNNING $name (PID: $pid, log: $log_path)"
        # Record the PID
        python3 -c "
import json, os
path = '$STATE_FILE'
d = {}
if os.path.exists(path):
    with open(path) as f:
        d = json.load(f)
d.setdefault('$name', {})['pid'] = $pid
with open(path, 'w') as f:
    json.dump(d, f, indent=2)
"
        return 0
    else
        log "FAIL $name: script exited immediately"
        set_pipeline_state "$name" "failed"
        return 1
    fi
}

# --- Main ---
main() {
    local mode="${1:-run}"
    ensure_dirs

    log "=== Pipeline Scheduler ($mode) ==="

    local budget
    budget=$(get_budget_remaining)

    # Status report: available regardless of the checks below.
    if [[ "$mode" == "--status" ]]; then
        echo -e "${CYAN}Pipeline Status:${NC}"
        echo "────────────────────────────────────────────────────"
        local entry name script max_tokens dep state color
        for entry in "${PIPELINES[@]}"; do
            IFS='|' read -r name script max_tokens dep <<< "$entry"
            state=$(pipeline_state_of "$name")
            color=$NC
            case "$state" in
                running) color=$YELLOW ;;
                complete) color=$GREEN ;;
                failed) color=$RED ;;
            esac
            printf "  %-25s %b%s%b (max: %s tokens, dep: %s)\n" "$name" "$color" "$state" "$NC" "$max_tokens" "$dep"
        done
        echo "────────────────────────────────────────────────────"
        echo "  Budget: $budget / $TOKEN_DAILY_LIMIT tokens remaining"
        echo "  Peak hours: $PEAK_HOURS_START:00 - $PEAK_HOURS_END:00"
        exit 0
    fi

    # Check 1: Is inference available?
    if ! check_inference_available; then
        log "No inference provider available. Skipping all pipelines."
        exit 0
    fi
    log "Inference: AVAILABLE"

    # Check 2: Is it peak hours?
    if is_peak_hours && [[ "$mode" != "--force" ]]; then
        local h
        h=$(now_hour)
        log "Peak hours ($h:00). Skipping pipeline start. Use --force to override."
        exit 0
    fi
    log "Off-peak: OK"

    # Check 3: Interactive load
    if ! check_interactive_load && [[ "$mode" != "--force" ]]; then
        log "High interactive load. Skipping pipeline start."
        exit 0
    fi
    log "Interactive load: OK"

    # Check 4: Token budget
    log "Token budget remaining: $budget / $TOKEN_DAILY_LIMIT"

    if [[ $budget -le 0 ]]; then
        log "Daily token budget exhausted. Stopping."
        exit 0
    fi

    # Find and start the highest-priority incomplete pipeline
    local started=0
    local entry name script max_tokens dep
    for entry in "${PIPELINES[@]}"; do
        IFS='|' read -r name script max_tokens dep <<< "$entry"

        # Skip if already running or complete
        if [[ "$(is_pipeline_running "$name")" == "true" ]]; then
            log "SKIP $name: already running"
            continue
        fi
        if [[ "$(is_pipeline_complete "$name")" == "true" ]]; then
            log "SKIP $name: already complete"
            continue
        fi

        # Check dependency
        if ! check_dependency "$dep"; then
            log "SKIP $name: dependency $dep not met"
            continue
        fi

        # Try to start
        if start_pipeline "$name" "$script" "$max_tokens" "$budget" "$mode"; then
            started=1
            # Only start one pipeline per run (let it claim tokens before the next check).
            # Exception: playground-factory and training-factory can run in parallel.
            if [[ "$name" != "playground-factory" && "$name" != "training-factory" ]]; then
                break
            fi
        fi
    done

    if [[ $started -eq 0 ]]; then
        log "No pipelines to start (all complete, running, or blocked)."
    fi

    log "=== Pipeline Scheduler done ==="
}

main "$@"

scripts/pr_triage.py (new file, 271 lines)
@@ -0,0 +1,271 @@
#!/usr/bin/env python3
"""
PR Triage Automation — Categorize, deduplicate, and report on open PRs.

Usage:
    python scripts/pr_triage.py                    # Generate report
    python scripts/pr_triage.py --json             # JSON output
    python scripts/pr_triage.py --auto-merge       # Auto-merge safe PRs
    python scripts/pr_triage.py --repo timmy-home  # Single repo
"""

import json
import os
import re
import sys
import urllib.request
from collections import Counter
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Optional

# ---------------------------------------------------------------------------
# Config
# ---------------------------------------------------------------------------

GITEA_BASE = os.environ.get("GITEA_API_BASE", "https://forge.alexanderwhitestone.com/api/v1")
TOKEN_PATH = os.environ.get("GITEA_TOKEN_PATH", str(Path.home() / ".config/gitea/token"))
ORG = "Timmy_Foundation"

DEFAULT_REPOS = [
    "timmy-home",
    "hermes-agent",
    "timmy-config",
    "the-nexus",
    "the-door",
    "burn-fleet",
    "second-son-of-timmy",
]
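
# Example (illustrative): point at a different Gitea instance or token path via env:
#   GITEA_API_BASE=https://forge.example.com/api/v1 \
#   GITEA_TOKEN_PATH=~/.config/gitea/token \
#   python scripts/pr_triage.py --repo hermes-agent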

# ---------------------------------------------------------------------------
# Categories
# ---------------------------------------------------------------------------

CATEGORY_RULES = {
    "training-data": [
        r"training[- ]?data", r"scene[- ]?description", r"dpo", r"training",
        r"batch[- ]?\d+", r"training[- ]?pipeline", r"jsonl",
    ],
    "bug-fix": [
        r"^fix[\(:]", r"\[BUG\]", r"\[FIX\]", r"bug fix", r"fixes #\d+",
        r"closes #\d+", r"broken", r"crash", r"regression",
    ],
    "feature": [
        r"^feat[\(:]", r"\[FEAT\]", r"\[FEATURE\]", r"new feature",
        r"add .+ support", r"implement",
    ],
    "docs": [
        r"^docs[\(:]", r"documentation", r"readme", r"genome",
    ],
    "security": [
        r"\[SECURITY\]", r"\[VITALIK\]", r"shield", r"injection",
        r"vulnerability", r"hardening",
    ],
    "infra": [
        r"\[INFRA\]", r"deploy", r"ansible", r"docker", r"ci[/ ]cd",
        r"cron", r"watchdog", r"systemd",
    ],
    "research": [
        r"research", r"benchmark", r"evaluation", r"analysis",
        r"\[BIG-BRAIN\]", r"investigate",
    ],
    "other": [],  # fallback
}


def categorize_pr(title: str, body: str) -> str:
    """Categorize a PR by its title and body."""
    text = f"{title} {body}".lower()
    for category, patterns in CATEGORY_RULES.items():
        if category == "other":
            continue
        for pattern in patterns:
            if re.search(pattern, text, re.IGNORECASE):
                return category
    return "other"
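
# Illustrative examples (categories are checked in dict order, first match wins):
#   categorize_pr("fix: handle crash on empty scene", "")  -> "bug-fix"
#   categorize_pr("Add DPO training batch 12", "")         -> "training-data"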

# ---------------------------------------------------------------------------
# Gitea API
# ---------------------------------------------------------------------------

def _load_token() -> str:
    try:
        return Path(TOKEN_PATH).read_text().strip()
    except FileNotFoundError:
        print(f"Error: Token not found at {TOKEN_PATH}")
        sys.exit(1)


def api_get(path: str, token: str) -> Any:
    req = urllib.request.Request(f"{GITEA_BASE}{path}")
    req.add_header("Authorization", f"token {token}")
    with urllib.request.urlopen(req, timeout=30) as resp:
        return json.loads(resp.read())


def get_open_prs(repo: str, token: str) -> list[dict]:
    """Fetch all open PRs for a repo."""
    prs = []
    page = 1
    while True:
        try:
            batch = api_get(f"/repos/{ORG}/{repo}/pulls?state=open&limit=50&page={page}", token)
            if not batch:
                break
            prs.extend(batch)
            if len(batch) < 50:
                break
            page += 1
        except Exception:
            break
    return prs


def get_issue_state(repo: str, issue_num: int, token: str) -> Optional[str]:
    """Check if a referenced issue is still open."""
    try:
        issue = api_get(f"/repos/{ORG}/{repo}/issues/{issue_num}", token)
        return issue.get("state", "unknown")
    except Exception:
        return None


def find_referenced_issues(pr_body: str, pr_title: str) -> list[int]:
    """Extract issue numbers referenced in PR body/title."""
    text = f"{pr_title} {pr_body}"
    return [int(m) for m in re.findall(r'#(\d+)', text)]
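
# Illustrative: find_referenced_issues("Fixes #42, related to #7", "fix: cleanup") -> [42, 7]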

def find_duplicates(prs: list[dict]) -> list[tuple[dict, dict]]:
    """Find PRs that reference the same issue."""
    issue_to_prs: dict[int, list[dict]] = {}
    for pr in prs:
        refs = find_referenced_issues(pr.get("body") or "", pr.get("title") or "")
        for issue_num in refs:
            issue_to_prs.setdefault(issue_num, []).append(pr)

    duplicates = []
    for pr_list in issue_to_prs.values():
        if len(pr_list) > 1:
            # Pair up duplicates
            for i in range(len(pr_list)):
                for j in range(i + 1, len(pr_list)):
                    duplicates.append((pr_list[i], pr_list[j]))

    return duplicates


# ---------------------------------------------------------------------------
# Triage
# ---------------------------------------------------------------------------

def triage_repo(repo: str, token: str) -> dict:
    """Triage all open PRs for a repo."""
    prs = get_open_prs(repo, token)

    categorized: dict[str, list[dict]] = {}
    stale_issues = []
    duplicates = find_duplicates(prs)

    for pr in prs:
        category = categorize_pr(pr.get("title") or "", pr.get("body") or "")
        categorized.setdefault(category, []).append(pr)

        # Check referenced issues
        refs = find_referenced_issues(pr.get("body") or "", pr.get("title") or "")
        for issue_num in refs:
            state = get_issue_state(repo, issue_num, token)
            if state == "closed":
                stale_issues.append({"pr": pr["number"], "issue": issue_num, "repo": repo})

    return {
        "repo": repo,
        "total_prs": len(prs),
        "by_category": {k: len(v) for k, v in categorized.items()},
        "categorized": categorized,
        "duplicates": [(a["number"], b["number"]) for a, b in duplicates],
        "stale_issues": stale_issues,
    }


def triage_all(repos: list[str], token: str) -> list[dict]:
    """Triage all repos."""
    results = []
    for repo in repos:
        print(f" Triaging {repo}...", file=sys.stderr)
        try:
            result = triage_repo(repo, token)
            results.append(result)
        except Exception as e:
            print(f" Error triaging {repo}: {e}", file=sys.stderr)
            results.append({"repo": repo, "error": str(e)})
    return results


# ---------------------------------------------------------------------------
# Report
# ---------------------------------------------------------------------------

def generate_markdown_report(results: list[dict]) -> str:
    """Generate a markdown triage report."""
    total_prs = sum(r.get("total_prs", 0) for r in results)
    all_categories: Counter = Counter()
    all_duplicates = []
    all_stale = []

    for r in results:
        for cat, count in r.get("by_category", {}).items():
            all_categories[cat] += count
        all_duplicates.extend(r.get("duplicates", []))
        all_stale.extend(r.get("stale_issues", []))

    lines = [
        "# PR Triage Report",
        "",
        f"Generated: {datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M UTC')}",
        "",
        "## Summary",
        "",
        "| Metric | Count |",
        "|--------|-------|",
        f"| Total open PRs | {total_prs} |",
        f"| Repos scanned | {len(results)} |",
        f"| Duplicates found | {len(all_duplicates)} |",
        f"| Stale (issue closed) | {len(all_stale)} |",
        "",
        "## By Category",
        "",
        "| Category | Count |",
        "|----------|-------|",
    ]

    for cat, count in all_categories.most_common():
        lines.append(f"| {cat} | {count} |")

    if all_duplicates:
        lines.extend(["", "## Duplicates (same issue referenced)", ""])
        for a, b in all_duplicates:
            lines.append(f"- PR #{a} and PR #{b}")

    if all_stale:
        lines.extend(["", "## Stale PRs (referenced issue is closed)", ""])
        for s in all_stale:
            lines.append(f"- {s['repo']} PR #{s['pr']} → issue #{s['issue']} (closed)")

    # Per-repo detail
    for r in results:
        if r.get("error"):
            lines.extend(["", f"## {r['repo']} — ERROR", "", f"```{r['error']}```"])
            continue

        lines.extend(["", f"## {r['repo']} ({r.get('total_prs', 0)} open PRs)", ""])
        for cat, prs in r.get("categorized", {}).items():
            if not prs:
                continue
            lines.append(f"