Compare commits
5 Commits
burn/212-f
...
fix/212-de
| Author | SHA1 | Date | |
|---|---|---|---|
| 9b180c1659 | |||
| 580e992814 | |||
| 17e03de983 | |||
| ef6a8d3baf | |||
| 576bded2b3 ||||
@@ -113,7 +113,8 @@ def find_slow_tests_by_scan(repo_path: str) -> List[Bottleneck]:
|
||||
(r"time\.sleep\((\d+(?:\.\d+)?)\)", "Contains time.sleep() — consider using mock or async wait"),
|
||||
(r"subprocess\.run\(.*timeout=(\d+)", "Subprocess with timeout — may block test"),
|
||||
(r"requests\.(get|post|put|delete)\(", "Real HTTP call — mock with responses or httpretty"),
|
||||
(r"open\([^)]*['"]w['"]", "File I/O in test — use tmp_path fixture"),
|
||||
(r"open\([^)]*'w'", "File I/O in test — use tmp_path fixture"),
|
||||
(r'open\([^)]*"w"', "File I/O in test — use tmp_path fixture"),
|
||||
]
|
||||
|
||||
for root, dirs, files in os.walk(repo_path):
|
||||
@@ -506,13 +507,13 @@ def format_markdown(report: PerfReport) -> str:
|
||||
lines.append(f"- {icon} {b.name}{loc} — ~{b.duration_s:.1f}s — {b.recommendation}")
|
||||
lines.append(f"")
|
||||
|
||||
return "
|
||||
".join(lines)
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
# ── Main ───────────────────────────────────────────────────────────
|
||||
|
||||
def main():
|
||||
global SLOW_TEST_THRESHOLD_S
|
||||
parser = argparse.ArgumentParser(description="Performance Bottleneck Finder")
|
||||
parser.add_argument("--repo", default=".", help="Path to repository to analyze")
|
||||
parser.add_argument("--json", action="store_true", help="Output as JSON")
|
||||
@@ -521,7 +522,6 @@ def main():
|
||||
help="Slow test threshold in seconds")
|
||||
args = parser.parse_args()
|
||||
|
||||
global SLOW_TEST_THRESHOLD_S
|
||||
SLOW_TEST_THRESHOLD_S = args.threshold
|
||||
|
||||
if not os.path.isdir(args.repo):
|
||||
|
||||
@@ -9,24 +9,126 @@ Usage:
|
||||
python3 scripts/refactoring_opportunity_finder.py --output proposals/refactoring_opportunity_finder.json --dry-run
|
||||
"""
|
||||
|
||||
import ast
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional, Tuple
|
||||
|
||||
|
||||
@dataclass
class FileMetrics:
    """Metrics for a single source file."""
    # Path of the analyzed file as supplied by the scanner
    # (TODO confirm whether repo-relative or absolute).
    path: str
    # Total number of source lines in the file.
    lines: int = 0
    # Average cyclomatic complexity across the file's functions.
    complexity: float = 0.0
    # Highest cyclomatic complexity of any single function in the file.
    max_complexity: int = 0
    # Number of function definitions found in the file.
    functions: int = 0
    # Number of class definitions found in the file.
    classes: int = 0
    # Churn counters — presumably commits touching the file in the last
    # 30/90 days; populated elsewhere, verify against the collector.
    churn_30d: int = 0
    churn_90d: int = 0
    # Fraction of the file covered by tests (0.0-1.0), or None when unknown.
    test_coverage: Optional[float] = None
    # Composite 0-100 priority score (see calculate_refactoring_score).
    refactoring_score: float = 0.0
|
||||
|
||||
|
||||
class _ComplexityVisitor(ast.NodeVisitor):
    """AST visitor that records (name, cyclomatic complexity) for every
    function definition and counts class definitions."""

    def __init__(self):
        self.functions = []  # list of (function_name, complexity) tuples
        self.classes = 0

    def visit_FunctionDef(self, node):
        """Score one function: start at 1 and add 1 per branch point."""
        score = 1
        for descendant in ast.walk(node):
            if isinstance(descendant, (ast.If, ast.While, ast.For, ast.ExceptHandler)):
                score += 1
            elif isinstance(descendant, ast.BoolOp):
                # and/or chains: each operand beyond the first is a branch
                score += len(descendant.values) - 1
            elif isinstance(descendant, ast.comprehension):
                # one for the comprehension clause, one per `if` filter
                score += 1 + len(descendant.ifs)
        self.functions.append((node.name, score))
        self.generic_visit(node)

    def visit_AsyncFunctionDef(self, node):
        # Async functions are scored exactly like synchronous ones.
        self.visit_FunctionDef(node)

    def visit_ClassDef(self, node):
        self.classes += 1
        self.generic_visit(node)
|
||||
|
||||
|
||||
def compute_file_complexity(filepath: str) -> Tuple[float, int, int, int, int]:
    """Compute cyclomatic complexity for a Python file.

    Parses the file with ``ast`` and scores every function via
    ``_ComplexityVisitor``.

    Args:
        filepath: Path to the Python source file to analyze.

    Returns:
        (avg_complexity, max_complexity, function_count, class_count, line_count)
        All zeros when the file is unreadable, undecodable, or unparseable.
    """
    try:
        # Explicit UTF-8: the default locale encoding makes results
        # platform-dependent; decode failures are still caught below.
        with open(filepath, encoding="utf-8") as f:
            source = f.read()
        # ValueError covers ast.parse rejecting sources with null bytes.
        tree = ast.parse(source, filename=filepath)
    except (SyntaxError, UnicodeDecodeError, OSError, ValueError):
        return (0.0, 0, 0, 0, 0)

    visitor = _ComplexityVisitor()
    visitor.visit(tree)
    line_count = len(source.splitlines())

    # A file with no functions still reports its classes and line count.
    if not visitor.functions:
        return (0.0, 0, 0, visitor.classes, line_count)

    complexities = [c for _, c in visitor.functions]
    avg = sum(complexities) / len(complexities)
    max_c = max(complexities)
    return (round(avg, 1), max_c, len(visitor.functions), visitor.classes, line_count)
|
||||
|
||||
|
||||
def calculate_refactoring_score(metrics: FileMetrics) -> float:
    """Calculate a refactoring priority score (0-100).

    Components: complexity (30), size (20), churn (25), coverage (15), structure (10).
    """
    # Complexity component (max 30): 2 points per unit of average complexity.
    complexity_pts = min(30, metrics.complexity * 2) if metrics.complexity > 0 else 0

    # Size component (max 20): 1 point per 50 lines.
    size_pts = min(20, metrics.lines / 50) if metrics.lines > 0 else 0

    # Churn component (max 25): recent (30-day) commits weigh double.
    churn_pts = min(25, ((metrics.churn_30d * 2) + metrics.churn_90d) * 1.5)

    # Coverage component (max 15): unknown coverage earns the midpoint;
    # files at or above 50% coverage are heavily discounted.
    if metrics.test_coverage is None:
        coverage_pts = 7.5
    elif metrics.test_coverage < 0.5:
        coverage_pts = 15 * (1 - metrics.test_coverage)
    else:
        coverage_pts = 15 * (1 - metrics.test_coverage) * 0.3

    # Structure component (max 10): half a point per function past the tenth.
    structure_pts = min(10, (metrics.functions - 10) * 0.5) if metrics.functions > 10 else 0

    total = complexity_pts + size_pts + churn_pts + coverage_pts + structure_pts
    return round(min(100, max(0, total)), 1)
|
||||
|
||||
|
||||
def generate_proposals():
    """Generate sample proposals for this engine.

    Returns:
        A list of proposal dicts with title, description, impact/effort
        scores, category, originating engine id, and a UTC ISO timestamp.
    """
    # TODO: Implement actual proposal generation logic
    return [
        {
            # Plain string: there are no placeholders, so no f-prefix needed.
            "title": "Sample improvement from 10.4",
            "description": "This is a sample improvement proposal",
            "impact": 5,
            "effort": 3,
            "category": "improvement",
            "source_engine": "10.4",
            # Timezone-aware timestamp so downstream consumers get UTC.
            "timestamp": datetime.now(timezone.utc).isoformat(),
        }
    ]
|
||||
|
||||
@@ -34,12 +136,11 @@ def generate_proposals():
|
||||
def main():
    """CLI entry point: generate proposals and optionally write them as JSON.

    Raises:
        SystemExit: via argparse when required arguments are missing.
    """
    parser = argparse.ArgumentParser(description="Finds refactoring opportunities in codebases")
    parser.add_argument("--output", required=True, help="Output file for proposals")
    # Register --dry-run exactly once; duplicate add_argument calls for the
    # same option string make argparse raise at startup.
    parser.add_argument("--dry-run", action="store_true", help="Do not write output file")
    args = parser.parse_args()

    proposals = generate_proposals()

    if not args.dry_run:
        with open(args.output, "w") as f:
            json.dump({"proposals": proposals}, f, indent=2)
|
||||
|
||||
Reference in New Issue
Block a user