Closes #1250 — shell/Python script for cron on Hermes (every 6h).
Identifies PRs that are both conflicted AND superseded; matches by
"Closes #NNN" references and title similarity (60%+ word overlap).
Configurable grace period via GRACE_HOURS env var; DRY_RUN mode for
safe testing; idempotent — safe to re-run.

202 lines · 7.4 KiB · Bash · Executable File
#!/usr/bin/env bash
# ═══════════════════════════════════════════════════════════════
# stale-pr-closer.sh — Auto-close conflicted PRs superseded by
# already-merged work.
#
# Designed for cron on Hermes:
#   0 */6 * * * /path/to/the-nexus/.githooks/stale-pr-closer.sh
#
# Closes #1250 (parent epic #1248)
# ═══════════════════════════════════════════════════════════════

set -euo pipefail

# ─── Configuration ──────────────────────────────────────────
GITEA_URL="${GITEA_URL:-https://forge.alexanderwhitestone.com}"
GITEA_TOKEN="${GITEA_TOKEN:?Set GITEA_TOKEN env var}"
REPO="${REPO:-Timmy_Foundation/the-nexus}"
GRACE_HOURS="${GRACE_HOURS:-24}"
DRY_RUN="${DRY_RUN:-false}"

# The Python heredoc below reads these via os.environ. Plain shell
# assignments are NOT inherited by child processes, so export them
# explicitly — otherwise a token set in a sourced config file without
# `export` passes the :? check here but KeyErrors in Python.
export GITEA_URL GITEA_TOKEN REPO GRACE_HOURS DRY_RUN

API="$GITEA_URL/api/v1"
AUTH="Authorization: token $GITEA_TOKEN"

# Timestamped logger (UTC, ISO-8601) for cron-friendly output.
log() { echo "[$(date -u +%Y-%m-%dT%H:%M:%SZ)] $*"; }
# ─── Announce run parameters ───────────────────────────────
# NOTE: all fetching and processing happens once, inside the Python
# heredoc below. An earlier revision pre-fetched open/merged PRs here
# with curl and piped them into a `python3 -c` stub whose output was
# discarded (`2>/dev/null || true`) — dead code plus duplicate API
# round-trips — so that section has been removed.
log "Checking open PRs for $REPO (grace period: ${GRACE_HOURS}h, dry_run: $DRY_RUN)"
# Use Python for the complex logic
python3 << 'PYEOF'
# Imports consolidated at the top of the script (urllib was previously
# imported mid-file after the config constants; subprocess was unused).
import json
import os
import re
import sys
import urllib.error
import urllib.request
from datetime import datetime, timedelta, timezone

# ─── Configuration (mirrors the shell defaults above) ──────
GITEA_URL = os.environ.get("GITEA_URL", "https://forge.alexanderwhitestone.com")
GITEA_TOKEN = os.environ["GITEA_TOKEN"]  # required; the shell already validated it
REPO = os.environ.get("REPO", "Timmy_Foundation/the-nexus")
GRACE_HOURS = int(os.environ.get("GRACE_HOURS", "24"))
DRY_RUN = os.environ.get("DRY_RUN", "false") == "true"

API = f"{GITEA_URL}/api/v1"
HEADERS = {"Authorization": f"token {GITEA_TOKEN}", "Content-Type": "application/json"}
def api_get(path):
    """GET *path* relative to the Gitea API root and decode the JSON body."""
    request = urllib.request.Request(f"{API}{path}", headers=HEADERS)
    with urllib.request.urlopen(request) as response:
        payload = response.read()
    return json.loads(payload)
def api_post(path, data):
    """POST *data* as JSON to *path* and decode the JSON response."""
    encoded = json.dumps(data).encode()
    request = urllib.request.Request(
        f"{API}{path}", data=encoded, headers=HEADERS, method="POST"
    )
    with urllib.request.urlopen(request) as response:
        payload = response.read()
    return json.loads(payload)
def api_patch(path, data):
    """PATCH *path* with *data* as JSON and decode the JSON response."""
    encoded = json.dumps(data).encode()
    request = urllib.request.Request(
        f"{API}{path}", data=encoded, headers=HEADERS, method="PATCH"
    )
    with urllib.request.urlopen(request) as response:
        payload = response.read()
    return json.loads(payload)
def log(msg):
    """Print *msg* prefixed with a UTC ISO-8601 timestamp.

    The previous version re-imported datetime/timezone locally on every
    call; both are already imported at module top, so the redundant
    import has been removed.
    """
    ts = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    print(f"[{ts}] {msg}")
# Time window: a PR updated after `cutoff` is still inside its grace period.
now = datetime.now(timezone.utc)
cutoff = now - timedelta(hours=GRACE_HOURS)

# Pull the currently open PRs; bail out early when there is nothing to do.
open_prs = api_get(f"/repos/{REPO}/pulls?state=open&limit=50")
if not open_prs:
    log("No open PRs. Done.")
    sys.exit(0)
log(f"Found {len(open_prs)} open PR(s)")

# Recently merged PRs are the candidates that may supersede an open one
# (closed-but-unmerged PRs are filtered out).
closed_query = f"/repos/{REPO}/pulls?state=closed&limit=100&sort=updated&direction=desc"
merged_prs = [p for p in api_get(closed_query) if p.get("merged")]

# Build lookup: issue_number -> merged PR that closes it
# Parse "Closes #NNN" from merged PR bodies
def extract_closes(body):
    """Return the set of issue numbers referenced by closing keywords in *body*.

    Recognizes the full Gitea/GitHub closing-keyword set — close/closes/
    closed, fix/fixes/fixed, resolve/resolves/resolved — case-insensitively.
    The previous pattern (closes?|fixes?|resolves?) missed the past-tense
    forms, so e.g. "Closed #12" was not matched. Returns an empty set for
    None or empty bodies.
    """
    if not body:
        return set()
    matches = re.findall(
        r'(?:close[sd]?|fix(?:e[sd])?|resolve[sd]?)\s+#(\d+)',
        body,
        re.IGNORECASE,
    )
    return {int(m) for m in matches}
# issue number -> merged PR whose body claims to close that issue
merged_by_issue = {}
# merged PR number -> (significant title words, merged PR) — used as a
# fallback match for PRs that implement the same feature without
# referencing the same issue
merged_by_title_words = {}

for candidate in merged_prs:
    for issue in extract_closes(candidate.get("body", "")):
        merged_by_issue[issue] = candidate

    # Strip tags/prefixes from the title, then keep the meaningful words.
    stripped = re.sub(r'\[claude\]|\[.*?\]|feat\(.*?\):', '', candidate.get("title", ""))
    significant = {w for w in re.findall(r'\w+', stripped.strip().lower()) if len(w) > 3}
    if significant:
        merged_by_title_words[candidate["number"]] = (significant, candidate)
# Counts PRs closed (or, in dry-run mode, that would have been closed).
closed_count = 0

for pr in open_prs:
    pr_num = pr["number"]
    pr_title = pr["title"]
    # NOTE(review): Gitea may omit `mergeable` while a merge check is
    # pending; defaulting to True means we never close on unknown state.
    mergeable = pr.get("mergeable", True)
    updated_at = datetime.fromisoformat(pr["updated_at"].replace("Z", "+00:00"))

    # Skip if within grace period — give authors time to rebase first.
    if updated_at > cutoff:
        log(f"  PR #{pr_num}: within grace period, skipping")
        continue

    # Check 1: Is it conflicted? Only conflicted PRs are candidates.
    if mergeable:
        log(f"  PR #{pr_num}: mergeable, skipping")
        continue

    # Check 2: Does a merged PR close the same issue?
    pr_closes = extract_closes(pr.get("body", ""))
    superseded_by = None
    for issue_num in pr_closes:
        if issue_num in merged_by_issue:
            superseded_by = merged_by_issue[issue_num]
            break

    # Check 3: Title similarity match (if no issue match).
    if not superseded_by:
        pr_title_clean = re.sub(r'\[.*?\]|feat\(.*?\):', '', pr_title).strip().lower()
        pr_words = set(w for w in re.findall(r'\w+', pr_title_clean) if len(w) > 3)

        best_overlap = 0
        for mp_num, (mp_words, mp) in merged_by_title_words.items():
            if mp_num == pr_num:
                continue
            overlap = len(pr_words & mp_words)
            # Require at least 60% of this PR's title words to appear in
            # the merged PR's title; keep the best-overlapping match.
            if pr_words and overlap / len(pr_words) >= 0.6 and overlap > best_overlap:
                best_overlap = overlap
                superseded_by = mp

    if not superseded_by:
        log(f"  PR #{pr_num}: conflicted but no superseding PR found, skipping")
        continue

    sup_num = superseded_by["number"]
    sup_title = superseded_by["title"]
    # BUG FIX: dict.get's default does not apply when the key exists with
    # a null value, and None[:10] raises TypeError. Coalesce with `or`.
    merged_at = (superseded_by.get("merged_at") or "unknown")[:10]

    comment = (
        f"**Auto-closed by stale-pr-closer**\n\n"
        f"This PR has merge conflicts and has been superseded by #{sup_num} "
        f"(\"{sup_title}\"), merged {merged_at}.\n\n"
        f"If this PR contains unique work not covered by #{sup_num}, "
        f"please reopen and rebase against `main`."
    )

    if DRY_RUN:
        log(f"  [DRY RUN] Would close PR #{pr_num} — superseded by #{sup_num}")
    else:
        try:
            # Post the explanatory comment first, then close the PR.
            api_post(f"/repos/{REPO}/issues/{pr_num}/comments", {"body": comment})
            api_patch(f"/repos/{REPO}/pulls/{pr_num}", {"state": "closed"})
        except urllib.error.HTTPError as e:
            # One failing PR must not abort the whole cron run.
            log(f"  PR #{pr_num}: API error {e.code}, skipping")
            continue
        log(f"  Closed PR #{pr_num} — superseded by #{sup_num} ({sup_title})")

    closed_count += 1

log(f"Done. {'Would close' if DRY_RUN else 'Closed'} {closed_count} stale PR(s).")
|
|
PYEOF
|