feat: weekly progress update system for TurboQuant (#76)
All checks were successful
Smoke Test / smoke (pull_request) Successful in 14s
All checks were successful
Smoke Test / smoke (pull_request) Successful in 14s
- scripts/weekly_update.py: Auto-generates weekly update from git log, Gitea API (issues/PRs/blockers), and benchmark results. Supports --post to issue #76, --json for raw data, --since for date range. - scripts/weekly_update.sh: Shell wrapper for convenience. - docs/WEEKLY_TEMPLATE.md: Manual update template. - docs/PROJECT_STATUS.md: Added Weekly Progress Updates section with process (weekly cadence, benchmark-as-happens, blocker escalation). - tests/test_weekly_update.py: Validates script runs, JSON output, and handles edge cases.
This commit is contained in:
323
scripts/weekly_update.py
Normal file
323
scripts/weekly_update.py
Normal file
@@ -0,0 +1,323 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
TurboQuant Weekly Progress Update Generator
|
||||
|
||||
Generates a structured weekly update from:
|
||||
- Git log (commits since last week)
|
||||
- Open/closed issues and PRs
|
||||
- Benchmark results
|
||||
- Blockers (open issues labeled 'blocker')
|
||||
|
||||
Usage:
|
||||
python3 scripts/weekly_update.py # This week
|
||||
python3 scripts/weekly_update.py --since 2026-04-08 # Custom range
|
||||
python3 scripts/weekly_update.py --post # Post as Gitea comment on tracking issue
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
|
||||
# `requests` is optional: it is only needed for Gitea API access
# (--post and issue/PR state); everything else degrades gracefully.
try:
    import requests
    HAS_REQUESTS = True
except ImportError:
    HAS_REQUESTS = False


# Repository root, resolved from this script's location (scripts/ -> repo root).
REPO_ROOT = Path(__file__).resolve().parent.parent
# Gitea instance and repository targeted by all API calls below.
GITEA_URL = "https://forge.alexanderwhitestone.com"
REPO_PATH = "Timmy_Foundation/turboquant"
TRACKING_ISSUE = 76  # This issue — weekly updates are posted here as comments
|
||||
|
||||
|
||||
def git_log(since: str, until: str = None) -> list[dict]:
    """Return commits between *since* and *until* (YYYY-MM-DD) as dicts.

    Args:
        since: Start date passed to ``git log --since``.
        until: End date; defaults to today.

    Returns:
        A list of {"hash", "author", "email", "date", "subject"} dicts
        (hash abbreviated to 8 chars, date to YYYY-MM-DD). Empty when git
        fails or no commits match.
    """
    until = until or datetime.now().strftime("%Y-%m-%d")
    # Use %x1f (ASCII unit separator) between fields instead of "|": a pipe
    # can legitimately appear in author names or subjects and would shift
    # the fields, whereas \x1f cannot occur in commit metadata.
    cmd = [
        "git", "-C", str(REPO_ROOT), "log",
        f"--since={since}", f"--until={until}",
        "--format=%H%x1f%an%x1f%ae%x1f%aI%x1f%s",
        "--all",
    ]
    result = subprocess.run(cmd, capture_output=True, text=True)
    if result.returncode != 0:
        # Not a git repo / bad range: degrade to "no commits" rather than
        # silently parsing whatever landed on stdout.
        return []
    commits = []
    for line in result.stdout.strip().split("\n"):
        if not line:
            continue
        # maxsplit keeps any stray \x1f in the subject intact (defensive).
        parts = line.split("\x1f", 4)
        if len(parts) == 5:
            commits.append({
                "hash": parts[0][:8],
                "author": parts[1],
                "email": parts[2],
                "date": parts[3][:10],
                "subject": parts[4],
            })
    return commits
|
||||
|
||||
|
||||
def git_diff_stats(since: str) -> dict:
    """Summarize file changes between *since* and HEAD via ``git diff --stat``.

    Args:
        since: Revision/date string used as the left side of ``<since>..HEAD``.
            NOTE(review): this receives a YYYY-MM-DD date from callers, but
            ``git diff`` expects a revision — confirm a matching ref/tag
            exists, or resolve the date to a commit first.

    Returns:
        {"summary": <git's trailing --stat line or "No changes">,
         "files_changed": <number of per-file stat rows>}
    """
    cmd = [
        "git", "-C", str(REPO_ROOT), "diff",
        "--stat", f"{since}..HEAD",
    ]
    result = subprocess.run(cmd, capture_output=True, text=True)
    output = result.stdout.strip()
    # BUGFIX: "".split("\n") yields [""], so the original `if lines` check was
    # always truthy and "No changes" was unreachable; test the raw output.
    if not output:
        return {"summary": "No changes", "files_changed": 0}
    lines = output.split("\n")
    # Each changed file renders as "path | N ++--"; the last line is git's
    # "X files changed, ..." summary.
    return {"summary": lines[-1], "files_changed": len([l for l in lines if "|" in l])}
|
||||
|
||||
|
||||
def find_benchmarks() -> list[dict]:
    """Scan the benchmarks/ directory for JSON results and markdown reports.

    Returns:
        A list of dicts: JSON results as {"file", "data"}; markdown reports
        as {"file", "type": "report", "modified", "size"}. Empty when the
        directory is missing. Unreadable/invalid files are skipped.
    """
    bench_dir = REPO_ROOT / "benchmarks"
    results = []
    if not bench_dir.exists():
        return results

    for f in bench_dir.glob("*.json"):
        try:
            data = json.loads(f.read_text())
            results.append({"file": f.name, "data": data})
        # Narrowed from the original `(json.JSONDecodeError, Exception)`,
        # whose broad second member swallowed every error (including bugs).
        # Only expected failure modes — bad JSON, unreadable file, bad
        # encoding — are skipped now.
        except (json.JSONDecodeError, OSError, UnicodeDecodeError):
            pass

    # Also check for markdown reports (the directory README is not a report)
    for f in bench_dir.glob("*.md"):
        if f.name != "README.md":
            stat = f.stat()
            results.append({
                "file": f.name,
                "type": "report",
                "modified": datetime.fromtimestamp(stat.st_mtime).strftime("%Y-%m-%d"),
                "size": stat.st_size,
            })

    return results
|
||||
|
||||
|
||||
def get_gitea_state(token: str = None) -> dict:
    """Fetch open/closed issue and open-PR state from the Gitea API.

    Args:
        token: Gitea API token. Without it (or without the `requests`
            library) the API is reported unavailable instead of failing.

    Returns:
        On success: {"available": True, "open_issues": [...],
        "closed_issues": [...], "open_prs": [...]}.
        Otherwise: {"available": False} (plus "error" on exception).
    """
    if not HAS_REQUESTS or not token:
        return {"available": False}

    H = {"Authorization": f"token {token}"}
    base = f"{GITEA_URL}/api/v1/repos/{REPO_PATH}"

    try:
        # BUGFIX: all requests now carry a timeout — without one, requests
        # waits indefinitely and an unresponsive server hangs the script.
        # Open issues
        r = requests.get(f"{base}/issues?state=open&limit=100", headers=H, timeout=30)
        open_issues = r.json() if r.status_code == 200 else []

        # Closed issues (recent)
        r = requests.get(f"{base}/issues?state=closed&limit=50&sort=updated&order=desc", headers=H, timeout=30)
        closed_issues = r.json() if r.status_code == 200 else []

        # PRs
        r = requests.get(f"{base}/pulls?state=open&limit=50", headers=H, timeout=30)
        open_prs = r.json() if r.status_code == 200 else []

        return {
            "available": True,
            "open_issues": open_issues,
            "closed_issues": closed_issues,
            "open_prs": open_prs,
        }
    except Exception as e:
        # Deliberate best-effort: any network/JSON failure degrades to
        # "unavailable"; the error string is surfaced for --json consumers.
        return {"available": False, "error": str(e)}
|
||||
|
||||
|
||||
def categorize_commits(commits: list[dict]) -> dict:
    """Bucket commits by their conventional-commit subject prefix.

    Matching is case-insensitive and first-match-wins; commits with no
    recognized prefix land in "other". Only non-empty buckets are returned.
    """
    # Ordered prefix table — order mirrors the matching precedence.
    prefix_table = [
        ("feat", ("feat", "feature")),
        ("fix", ("fix",)),
        ("bench", ("bench", "perf")),
        ("docs", ("doc",)),
        ("test", ("test",)),
        ("refactor", ("refactor",)),
        ("chore", ("chore", "ci")),
    ]
    buckets = {key: [] for key, _ in prefix_table}
    buckets["other"] = []
    for commit in commits:
        subject = commit["subject"].lower()
        bucket = next(
            (key for key, prefixes in prefix_table if subject.startswith(prefixes)),
            "other",
        )
        buckets[bucket].append(commit)
    return {name: items for name, items in buckets.items() if items}
|
||||
|
||||
|
||||
def generate_update(since: str, gitea_state: dict = None) -> str:
    """Generate the weekly update markdown.

    Args:
        since: Start date (YYYY-MM-DD) of the reporting window.
        gitea_state: Output of get_gitea_state(); the issue/PR/blocker
            sections are only emitted when it reports available=True.

    Returns:
        The complete weekly update as a markdown string.
    """
    now = datetime.now()
    until = now.strftime("%Y-%m-%d")
    week_label = f"Week of {since} to {until}"

    # Collect all inputs up front: commits, diff summary, commit categories,
    # and benchmark artifacts found on disk.
    commits = git_log(since, until)
    diff_stats = git_diff_stats(since)
    categories = categorize_commits(commits)
    benchmarks = find_benchmarks()

    lines = [
        f"## {week_label}",
        "",
        # NOTE(review): the label says UTC but datetime.now() returns local
        # time — confirm hosts run in UTC or switch to an aware timestamp.
        f"**Generated:** {now.strftime('%Y-%m-%d %H:%M UTC')}",
        f"**Commits:** {len(commits)} | **Files changed:** {diff_stats['files_changed']}",
        "",
    ]

    # Completed work by category
    lines.append("### Completed")
    lines.append("")
    if commits:
        for cat, items in categories.items():
            # Map category keys to human-readable section labels.
            label = {
                "feat": "Features", "fix": "Fixes", "bench": "Benchmarks",
                "docs": "Documentation", "test": "Tests", "refactor": "Refactoring",
                "chore": "Maintenance", "other": "Other"
            }.get(cat, cat)
            lines.append(f"**{label}:**")
            for c in items:
                lines.append(f"- `{c['hash']}` {c['subject']} ({c['author']}, {c['date']})")
            lines.append("")
    else:
        lines.append("- No commits this week")
        lines.append("")

    # Benchmark results
    if benchmarks:
        lines.append("### Benchmark Results")
        lines.append("")
        for b in benchmarks:
            if b.get("type") == "report":
                # Markdown reports carry mtime/size metadata; JSON entries don't.
                lines.append(f"- **{b['file']}** (updated {b['modified']}, {b['size']} bytes)")
            else:
                lines.append(f"- **{b['file']}** — see `benchmarks/{b['file']}`")
        lines.append("")

    # Gitea state (if available)
    if gitea_state and gitea_state.get("available"):
        open_issues = gitea_state["open_issues"]
        open_prs = gitea_state["open_prs"]
        closed = gitea_state["closed_issues"]

        lines.append("### In Progress")
        lines.append("")
        blockers = []
        for issue in open_issues:
            labels = [l["name"] for l in issue.get("labels", [])]
            prefix = ""
            if "blocker" in labels:
                # Collected for the dedicated Blockers section below.
                blockers.append(issue)
                prefix = "🚧 BLOCKER — "
            assignee = issue.get("assignee", {})
            # assignee can be None (not just missing) in Gitea payloads.
            who = assignee.get("login", "unassigned") if assignee else "unassigned"
            lines.append(f"- {prefix}#{issue['number']}: {issue['title']} ({who})")

        if open_prs:
            lines.append("")
            lines.append("**Open PRs:**")
            for pr in open_prs:
                lines.append(f"- #{pr['number']}: {pr['title']} ({pr['user']['login']})")
        lines.append("")

        # Blockers
        if blockers:
            lines.append("### Blockers")
            lines.append("")
            for b in blockers:
                lines.append(f"- #{b['number']}: {b['title']}")
                if b.get("body"):
                    # First 200 chars of the issue body, flattened to one line.
                    snippet = b["body"][:200].replace("\n", " ")
                    lines.append(f"  > {snippet}...")
            lines.append("")

        # Recently closed
        # NOTE(review): this keeps any issue with a closed_at, not only ones
        # closed within this period — the API query sorts by update time, so
        # recency is approximate; confirm this matches the section title.
        recent_closed = [i for i in closed if i.get("closed_at")]
        if recent_closed:
            lines.append("### Closed This Period")
            lines.append("")
            for issue in recent_closed[:10]:
                closed_date = issue.get("closed_at", "")[:10]
                lines.append(f"- #{issue['number']}: {issue['title']} (closed {closed_date})")
            lines.append("")

    # Next week
    lines.append("### Next Week")
    lines.append("")
    lines.append("- _TBD — fill in planned work_")
    lines.append("")

    return "\n".join(lines)
|
||||
|
||||
|
||||
def post_gitea_comment(token: str, body: str, issue: int = TRACKING_ISSUE):
    """Post the update as a comment on the tracking issue.

    Args:
        token: Gitea API token with write access to the repository.
        body: Markdown comment body.
        issue: Issue number to comment on; defaults to the tracking issue.

    Returns:
        True when the server answers 200/201, False on any failure.
    """
    if not HAS_REQUESTS:
        print("ERROR: requests library not available", file=sys.stderr)
        return False

    H = {"Authorization": f"token {token}", "Content-Type": "application/json"}
    url = f"{GITEA_URL}/api/v1/repos/{REPO_PATH}/issues/{issue}/comments"
    # BUGFIX: added a timeout — without one an unresponsive server would
    # hang the script indefinitely.
    r = requests.post(url, headers=H, json={"body": body}, timeout=30)

    if r.status_code in (200, 201):
        print(f"Posted comment on issue #{issue}")
        return True
    else:
        print(f"Failed to post: {r.status_code} {r.text}", file=sys.stderr)
        return False
|
||||
|
||||
|
||||
def main():
    """CLI entry point: parse arguments, gather state, then print or post."""
    parser = argparse.ArgumentParser(description="Generate TurboQuant weekly progress update")
    parser.add_argument("--since", help="Start date (YYYY-MM-DD), default: 7 days ago")
    parser.add_argument("--post", action="store_true", help="Post as Gitea comment on issue #76")
    parser.add_argument("--json", action="store_true", help="Output raw data as JSON")
    args = parser.parse_args()

    # Default window: the last seven days.
    window_start = args.since or (datetime.now() - timedelta(days=7)).strftime("%Y-%m-%d")

    # Pick up the Gitea API token if one is configured on this machine.
    token_file = Path.home() / ".config" / "gitea" / "token"
    api_token = token_file.read_text().strip() if token_file.exists() else None

    state = get_gitea_state(api_token) if api_token else {"available": False}

    if args.json:
        # Raw-data mode: dump inputs instead of rendering markdown.
        gitea_payload = None
        if state.get("available"):
            gitea_payload = {k: v for k, v in state.items() if k != "available"}
        payload = {
            "since": window_start,
            "commits": git_log(window_start),
            "benchmarks": find_benchmarks(),
            "gitea": gitea_payload,
        }
        print(json.dumps(payload, indent=2, default=str))
        return

    report = generate_update(window_start, state)

    if not args.post:
        print(report)
        return

    # --post requires a token; fail loudly rather than dropping the update.
    if not api_token:
        print("ERROR: No Gitea token found at ~/.config/gitea/token", file=sys.stderr)
        sys.exit(1)
    post_gitea_comment(api_token, report)
|
||||
|
||||
# Script entry point: run the CLI only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user