Compare commits
1 Commits
step35/87-
...
step35/171
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8374ec937e |
@@ -70,37 +70,38 @@ class PerfReport:
|
||||
# ── Test Analysis ──────────────────────────────────────────────────
|
||||
|
||||
def find_slow_tests_pytest(repo_path: str) -> List[Bottleneck]:
    """Run pytest with --durations and parse slow test output.

    Executes the repo's test suite once via ``python3 -m pytest`` and scans
    stdout for the "slowest durations" report, turning each entry above the
    slow-test threshold into a Bottleneck.

    Args:
        repo_path: Path to the repository whose tests should be timed.

    Returns:
        A list of Bottleneck records for tests slower than
        SLOW_TEST_THRESHOLD_S; empty if pytest is unavailable, times out,
        or cannot be run in ``repo_path``.
    """
    bottlenecks: List[Bottleneck] = []

    try:
        # Run pytest to get slowest tests; maxfail=1 avoids hanging on failures
        # and --tb=no keeps stdout compact so the durations report is easy to parse.
        result = subprocess.run(
            ["python3", "-m", "pytest", "-q",
             f"--durations={PYTEST_DURATIONS_COUNT}", "--tb=no", "--maxfail=1"],
            cwd=repo_path, capture_output=True, text=True, timeout=60
        )
        # Parse durations from stdout.
        # Lines look like: "  3.45s call test_file.py::test_name"
        for raw_line in result.stdout.splitlines():
            line = raw_line.strip()
            m = re.match(r'^(\d+\.?\d*)s\s+(call|setup|teardown)\s+(.+)$', line)
            if not m:
                continue
            try:
                duration = float(m.group(1))
            except ValueError:
                # Malformed number despite matching the pattern; skip the line.
                continue
            test_name = m.group(3).strip()
            if duration > SLOW_TEST_THRESHOLD_S:
                # Anything over 10s is considered critical; otherwise a warning.
                severity = "critical" if duration > 10 else "warning"
                bottlenecks.append(Bottleneck(
                    category="test",
                    name=test_name,
                    duration_s=duration,
                    severity=severity,
                    recommendation=f"Test takes {duration:.1f}s. Consider mocking slow I/O, using fixtures, or marking with @pytest.mark.slow."
                ))
    except (subprocess.TimeoutExpired, FileNotFoundError, PermissionError):
        # Best-effort analysis: missing python3/pytest, a hung suite, or an
        # unreadable repo simply yields no test bottlenecks.
        pass

    return bottlenecks
|
||||
|
||||
@@ -166,5 +166,6 @@ def _():
|
||||
assert_true(s in TIME_PER_POINT, f"Missing time for score {s}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Guarded entry point: only print the summary and set the process exit
    # code when run as a script, so importing this module has no side effects.
    print(f"\n=== Results: {PASS} passed, {FAIL} failed ===")
    sys.exit(0 if FAIL == 0 else 1)
|
||||
|
||||
Reference in New Issue
Block a user