@huey.periodic_task(crontab(hour="0", minute="0"))
def metrics_tick():
    """Force Multiplier 13: Fleet Cost & Velocity Tracker.

    Collect daily fleet velocity (issues closed, PRs merged) across the
    tracked repositories and append one JSON record per run to
    ``logs/metrics.jsonl`` under ``TIMMY_HOME``.

    Side effects:
        - Creates the ``logs`` directory if it does not exist.
        - Appends one JSONL line (the day's stats) to the metrics file.
        - Emits an ``audit_log`` entry carrying the same stats.
    """
    gitea = get_gitea_client()
    # Repositories whose activity counts toward fleet velocity.
    repos = [
        "Timmy_Foundation/timmy-config",
        "Timmy_Foundation/timmy-home",
        "Timmy_Foundation/the-nexus",
    ]

    daily_stats = {
        # Timezone-aware UTC timestamp so records sort/compare unambiguously.
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "issues_closed": 0,
        "prs_merged": 0,
        "repo_stats": {},
    }

    for repo in repos:
        # NOTE(review): assumes the Gitea client accepts a relative "24h"
        # window for both helpers — confirm against the client implementation.
        closed_issues = gitea.get_closed_issues(repo, since="24h")
        merged_prs = gitea.get_merged_prs(repo, since="24h")

        daily_stats["issues_closed"] += len(closed_issues)
        daily_stats["prs_merged"] += len(merged_prs)
        daily_stats["repo_stats"][repo] = {
            "issues": len(closed_issues),
            "prs": len(merged_prs),
        }

    # Presumably TIMMY_HOME is a pathlib.Path (the / operator requires it) —
    # verified nowhere in this chunk.
    metrics_file = TIMMY_HOME / "logs" / "metrics.jsonl"
    metrics_file.parent.mkdir(parents=True, exist_ok=True)

    # Append-only JSONL; explicit encoding avoids writing with the
    # platform-default codec (the original relied on it implicitly).
    with open(metrics_file, "a", encoding="utf-8") as f:
        f.write(json.dumps(daily_stats) + "\n")

    audit_log("metrics_logged", "system", daily_stats, confidence="High")