Compare commits
1 Commits
fix/1470
...
fix/cleanup
| Author | SHA1 | Date | |
|---|---|---|---|
| dc210580e2 |
@@ -1,24 +0,0 @@
|
||||
# PR Backlog Report — Timmy_Foundation/timmy-config
|
||||
|
||||
Generated: 2026-04-14 23:23:33
|
||||
|
||||
## Summary
|
||||
|
||||
- **Total Open PRs**: 50
|
||||
- **Stale (>30 days)**: 0
|
||||
- **Recent (<7 days)**: 50
|
||||
|
||||
## Recommendations
|
||||
|
||||
### Immediate Actions
|
||||
1. **Review stale PRs**: 0 PRs are >30 days old
|
||||
2. **Close duplicates**: Check for duplicate PRs on same issues
|
||||
3. **Assign reviewers**: Ensure each PR has a reviewer
|
||||
|
||||
### Process Improvements
|
||||
1. **Set SLAs**: Review within 48 hours, merge within 7 days
|
||||
2. **Weekly cleanup**: Run this analyzer weekly
|
||||
3. **Automate**: Add CI checks to prevent backlog
|
||||
|
||||
## Stale PRs (>30 days)
|
||||
|
||||
@@ -168,3 +168,62 @@ else
|
||||
fi
|
||||
|
||||
log "Script complete"
|
||||
|
||||
# ─── Stale Branch Cleanup ─────────────────────────────────
# Clean up branches from closed (unmerged) PRs and merged PRs.
log "Checking for stale branches from closed/merged PRs..."

# Get all open PRs first so we never delete a branch that still backs one.
OPEN_BRANCHES=$(curl -s -H "$AUTH" "$API/repos/$REPO/pulls?state=open&limit=100" | jq -r '.[] | .head.ref' | sort -u)

# Get all closed PRs (last 100)
CLOSED_PRS=$(curl -s -H "$AUTH" "$API/repos/$REPO/pulls?state=closed&limit=100")

if [ -n "$CLOSED_PRS" ] && [ "$CLOSED_PRS" != "null" ]; then
    STALE_BRANCHES=$(echo "$CLOSED_PRS" | jq -r '.[] | select(.merged == false) | .head.ref' | sort -u)
    MERGED_BRANCHES=$(echo "$CLOSED_PRS" | jq -r '.[] | select(.merged == true) | .head.ref' | sort -u)

    STALE_COUNT=0
    for branch in $STALE_BRANCHES; do
        # Skip main/master/develop — never touch long-lived branches.
        case "$branch" in main|master|develop|HEAD) continue ;; esac

        # SAFETY CHECK: Skip if branch is still used by an open PR.
        # -F/-x match the literal whole line, so branch names containing
        # regex metacharacters (e.g. "fix/v1.2") cannot false-match the
        # way the previous `grep -q "^$branch$"` pattern could.
        if echo "$OPEN_BRANCHES" | grep -Fxq -- "$branch"; then
            log "Skipping branch '$branch' - still has an open PR"
            continue
        fi

        if [ "$DRY_RUN" = "true" ]; then
            log "DRY RUN: Would delete stale branch '$branch' (from closed unmerged PR)"
        else
            # Best-effort delete: the branch may already be gone.
            curl -s -X DELETE -H "$AUTH" "$API/repos/$REPO/branches/$branch" > /dev/null 2>&1 || true
            log "Deleted stale branch: $branch"
        fi
        STALE_COUNT=$((STALE_COUNT + 1))
    done

    MERGED_COUNT=0
    for branch in $MERGED_BRANCHES; do
        case "$branch" in main|master|develop|HEAD) continue ;; esac

        # Same literal whole-line safety check as above.
        if echo "$OPEN_BRANCHES" | grep -Fxq -- "$branch"; then
            log "Skipping branch '$branch' - still has an open PR"
            continue
        fi

        if [ "$DRY_RUN" = "true" ]; then
            log "DRY RUN: Would delete merged branch '$branch'"
        else
            curl -s -X DELETE -H "$AUTH" "$API/repos/$REPO/branches/$branch" > /dev/null 2>&1 || true
            log "Deleted merged branch: $branch"
        fi
        MERGED_COUNT=$((MERGED_COUNT + 1))
    done

    log "Stale branch cleanup:"
    log " Closed (unmerged) branches: $STALE_COUNT"
    log " Merged branches: $MERGED_COUNT"
else
    log "Could not fetch closed PRs for branch cleanup"
fi
|
||||
|
||||
@@ -1,123 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
PR Backlog Analyzer for timmy-config
|
||||
|
||||
Analyzes open PRs and provides recommendations for cleanup.
|
||||
Issue: #1470
|
||||
"""
|
||||
|
||||
import json
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def get_open_prs(repo: str, token: str) -> list:
    """Fetch all open pull requests for *repo* from the Gitea API.

    Args:
        repo: Repository slug in ``owner/name`` form.
        token: Gitea personal access token.

    Returns:
        A list of PR dicts as returned by the API, or ``[]`` on any
        transport or decode failure.
    """
    result = subprocess.run([
        "curl", "-s", "-H", f"Authorization: token {token}",
        f"https://forge.alexanderwhitestone.com/api/v1/repos/{repo}/pulls?state=open&limit=100"
    ], capture_output=True, text=True)

    if result.returncode != 0:
        print(f"Error fetching PRs: {result.stderr}")
        return []

    # curl exits 0 even on HTTP errors, so the body may be empty or an
    # error object rather than a PR array; guard the decode instead of
    # letting json.JSONDecodeError crash the whole run.
    try:
        payload = json.loads(result.stdout)
    except json.JSONDecodeError:
        print("Error fetching PRs: response was not valid JSON")
        return []

    # The pulls endpoint returns a JSON array; anything else (e.g. an
    # {"message": ...} error dict) means the request failed.
    return payload if isinstance(payload, list) else []
|
||||
|
||||
|
||||
def analyze_pr(pr: dict) -> dict:
    """Condense one raw PR dict into the fields the report needs."""
    # Normalize the trailing 'Z' so fromisoformat accepts the timestamp,
    # then measure the PR's age in whole days, timezone-aware.
    opened_at = datetime.fromisoformat(pr['created_at'].replace('Z', '+00:00'))
    days_open = (datetime.now(opened_at.tzinfo) - opened_at).days

    label_names = [label['name'] for label in pr.get('labels', [])]

    summary = {
        'number': pr['number'],
        'title': pr['title'],
        'branch': pr['head']['ref'],
        'created': pr['created_at'],
        'age_days': days_open,
        'user': pr['user']['login'],
        'labels': label_names,
        'url': pr['html_url'],
    }
    return summary
|
||||
|
||||
|
||||
def generate_report(repo: str, prs: list) -> str:
    """Build the markdown backlog report for *repo*.

    Args:
        repo: Repository slug, used only in the report title.
        prs: List of summaries produced by ``analyze_pr``; each must
            have at least ``age_days``, ``number``, ``title``, ``user``
            and ``url`` keys.

    Returns:
        The complete report as a markdown string.
    """
    stale = [p for p in prs if p['age_days'] > 30]
    # Strictly < 7 so the bucket matches the "Recent (<7 days)" heading
    # below (the previous `<= 7` counted 7-day-old PRs as recent).
    recent = [p for p in prs if p['age_days'] < 7]

    report = f"""# PR Backlog Report — {repo}

Generated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}

## Summary

- **Total Open PRs**: {len(prs)}
- **Stale (>30 days)**: {len(stale)}
- **Recent (<7 days)**: {len(recent)}

## Recommendations

### Immediate Actions
1. **Review stale PRs**: {len(stale)} PRs are >30 days old
2. **Close duplicates**: Check for duplicate PRs on same issues
3. **Assign reviewers**: Ensure each PR has a reviewer

### Process Improvements
1. **Set SLAs**: Review within 48 hours, merge within 7 days
2. **Weekly cleanup**: Run this analyzer weekly
3. **Automate**: Add CI checks to prevent backlog

## Stale PRs (>30 days)

"""

    # Oldest first, so the most urgent reviews appear at the top.
    for pr in sorted(stale, key=lambda x: x['age_days'], reverse=True):
        report += f"- **#{pr['number']}**: {pr['title']}\n"
        report += f"  - Age: {pr['age_days']} days\n"
        report += f"  - Author: {pr['user']}\n"
        report += f"  - URL: {pr['url']}\n\n"

    return report
|
||||
|
||||
|
||||
def main():
    """Entry point: fetch open PRs, analyze them, and write a report."""
    # The token lives in the standard per-user Gitea config location.
    token_path = Path.home() / '.config' / 'gitea' / 'token'
    if not token_path.exists():
        print("Error: Gitea token not found")
        sys.exit(1)
    token = token_path.read_text().strip()

    repo = "Timmy_Foundation/timmy-config"
    print(f"Fetching PRs for {repo}...")

    prs = get_open_prs(repo, token)
    if not prs:
        print("No open PRs found")
        return
    print(f"Found {len(prs)} open PRs")

    analyzed = [analyze_pr(pr) for pr in prs]
    report = generate_report(repo, analyzed)

    # One report file per day under ./reports.
    output_dir = Path("reports")
    output_dir.mkdir(exist_ok=True)
    stamp = datetime.now().strftime('%Y%m%d')
    report_file = output_dir / f"pr-backlog-{stamp}.md"
    report_file.write_text(report)

    print(f"Report saved to: {report_file}")
    print(f"Total PRs: {len(prs)}")
    stale_total = len([p for p in analyzed if p['age_days'] > 30])
    print(f"Stale (>30 days): {stale_total}")


if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user