#!/usr/bin/env python3
|
||
|
|
"""
|
||
|
|
Tests for Performance Bottleneck Finder.
|
||
|
|
"""
|
||
|
|
|
||
|
|
import json
|
||
|
|
import os
|
||
|
|
import tempfile
|
||
|
|
import textwrap
|
||
|
|
from pathlib import Path
|
||
|
|
|
||
|
|
import pytest
|
||
|
|
|
||
|
|
# Add scripts to path
|
||
|
|
import sys
|
||
|
|
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "scripts"))
|
||
|
|
|
||
|
|
from perf_bottleneck_finder import (
|
||
|
|
Bottleneck,
|
||
|
|
PerfReport,
|
||
|
|
find_slow_tests_by_scan,
|
||
|
|
analyze_build_artifacts,
|
||
|
|
analyze_makefile_targets,
|
||
|
|
find_slow_imports,
|
||
|
|
generate_report,
|
||
|
|
format_markdown,
|
||
|
|
severity_sort_key,
|
||
|
|
)
|
||
|
|
|
||
|
|
|
||
|
|
class TestBottleneck:
    """Unit tests for the Bottleneck dataclass."""

    def test_creation(self):
        # Build with only the required fields; location info defaults to None.
        bn = Bottleneck(
            category="test",
            name="test_foo",
            duration_s=5.0,
            severity="warning",
            recommendation="Mock it",
        )
        assert bn.category == "test"
        assert bn.name == "test_foo"
        assert bn.duration_s == 5.0
        assert bn.severity == "warning"
        assert bn.recommendation == "Mock it"
        assert bn.file_path is None
        assert bn.line_number is None

    def test_with_location(self):
        # Optional file/line fields round-trip when supplied explicitly.
        bn = Bottleneck(
            category="test",
            name="test_bar",
            duration_s=2.0,
            severity="info",
            recommendation="Consider",
            file_path="tests/test_bar.py",
            line_number=42,
        )
        assert bn.file_path == "tests/test_bar.py"
        assert bn.line_number == 42

    def test_to_dict(self):
        # The instance attribute mapping exposes all declared fields.
        bn = Bottleneck("test", "x", 1.0, "info", "y")
        fields = vars(bn)  # same mapping as bn.__dict__
        for key in ("category", "duration_s"):
            assert key in fields
|
||
|
|
|
||
|
|
|
||
|
|
class TestPerfReport:
    """Unit tests for the PerfReport dataclass."""

    def test_creation(self):
        rpt = PerfReport(timestamp="2026-01-01T00:00:00Z", repo_path="/tmp/repo")
        assert rpt.timestamp == "2026-01-01T00:00:00Z"
        # Collection fields default to empty containers, not None.
        assert rpt.bottlenecks == []
        assert rpt.summary == {}

    def test_to_dict(self):
        rpt = PerfReport(
            timestamp="2026-01-01T00:00:00Z",
            repo_path="/tmp/repo",
            bottlenecks=[Bottleneck("test", "x", 1.0, "info", "y")],
        )
        serialized = rpt.to_dict()
        assert "bottlenecks" in serialized
        assert len(serialized["bottlenecks"]) == 1
|
||
|
|
|
||
|
|
|
||
|
|
class TestSeveritySort:
    """Unit tests for severity_sort_key ordering."""

    def test_critical_first(self):
        findings = [
            Bottleneck("test", "a", 1.0, "info", ""),
            Bottleneck("test", "b", 0.5, "critical", ""),
            Bottleneck("test", "c", 2.0, "warning", ""),
        ]
        # Severity outranks duration: critical sorts ahead of a longer warning.
        ordered = sorted(findings, key=severity_sort_key)
        assert [b.severity for b in ordered] == ["critical", "warning", "info"]

    def test_duration_within_severity(self):
        findings = [
            Bottleneck("test", "slow", 10.0, "warning", ""),
            Bottleneck("test", "fast", 1.0, "warning", ""),
        ]
        ordered = sorted(findings, key=severity_sort_key)
        # Ties on severity break toward the longer duration.
        assert ordered[0].name == "slow"
|
||
|
|
|
||
|
|
|
||
|
|
class TestSlowTestScan:
    """Unit tests for find_slow_tests_by_scan pattern scanning."""

    def test_finds_sleep(self, tmp_path):
        # A test module containing time.sleep should be flagged.
        (tmp_path / "test_sleepy.py").write_text(textwrap.dedent('''
            import time

            def test_slow():
                time.sleep(5)
                assert True
        '''))
        found = find_slow_tests_by_scan(str(tmp_path))
        assert found
        assert any("sleep" in b.recommendation.lower() for b in found)

    def test_finds_http_calls(self, tmp_path):
        # Live HTTP calls in tests should produce a mock/HTTP recommendation.
        (tmp_path / "test_http.py").write_text(textwrap.dedent('''
            import requests

            def test_api():
                resp = requests.get("https://example.com")
                assert resp.status_code == 200
        '''))
        found = find_slow_tests_by_scan(str(tmp_path))
        assert found
        assert any("HTTP" in b.recommendation or "mock" in b.recommendation.lower() for b in found)

    def test_skips_non_test_files(self, tmp_path):
        # Modules not matching the test naming pattern are not scanned.
        (tmp_path / "main.py").write_text("import time\ntime.sleep(10)\n")
        assert not find_slow_tests_by_scan(str(tmp_path))

    def test_handles_missing_dir(self):
        # A nonexistent root yields an empty result instead of raising.
        assert find_slow_tests_by_scan("/nonexistent/path") == []

    def test_file_path_populated(self, tmp_path):
        (tmp_path / "test_example.py").write_text("import time\n\ndef test_it():\n    time.sleep(2)\n")
        found = find_slow_tests_by_scan(str(tmp_path))
        assert found
        first = found[0]
        # Findings carry a source location for the offending line.
        assert first.file_path is not None
        assert first.line_number is not None
|
||
|
|
|
||
|
|
|
||
|
|
class TestBuildArtifacts:
    """Unit tests for analyze_build_artifacts directory scanning."""

    def test_finds_large_node_modules(self, tmp_path):
        nm_dir = tmp_path / "node_modules"
        nm_dir.mkdir()
        # Anything above the 10MB threshold should be reported.
        (nm_dir / "big.txt").write_bytes(b"x" * (11 * 1024 * 1024))
        found = analyze_build_artifacts(str(tmp_path))
        assert found
        assert any("node_modules" in b.name for b in found)

    def test_ignores_small_dirs(self, tmp_path):
        nm_dir = tmp_path / "node_modules"
        nm_dir.mkdir()
        (nm_dir / "small.txt").write_bytes(b"x" * 100)
        # A tiny directory stays below the size threshold.
        found = analyze_build_artifacts(str(tmp_path))
        assert all("node_modules" not in b.name for b in found)

    def test_finds_pycache(self, tmp_path):
        cache_dir = tmp_path / "__pycache__"
        cache_dir.mkdir()
        (cache_dir / "big.pyc").write_bytes(b"x" * (11 * 1024 * 1024))
        found = analyze_build_artifacts(str(tmp_path))
        assert any("__pycache__" in b.name for b in found)
|
||
|
|
|
||
|
|
|
||
|
|
class TestMakefileAnalysis:
    """Unit tests for analyze_makefile_targets."""

    def test_finds_pip_install(self, tmp_path):
        # A pip install in a recipe is a known slow target.
        (tmp_path / "Makefile").write_text(textwrap.dedent('''
            install:
            \tpip install -r requirements.txt

            test:
            \tpytest
        '''))
        assert len(analyze_makefile_targets(str(tmp_path))) >= 1

    def test_no_makefile(self, tmp_path):
        # No Makefile present -> nothing to report.
        assert analyze_makefile_targets(str(tmp_path)) == []
|
||
|
|
|
||
|
|
|
||
|
|
class TestImportAnalysis:
    """Unit tests for find_slow_imports heavy-import detection."""

    def test_finds_pandas(self, tmp_path):
        (tmp_path / "analysis.py").write_text("import pandas as pd\n")
        found = find_slow_imports(str(tmp_path))
        assert found
        assert any("pandas" in b.name for b in found)

    def test_finds_torch(self, tmp_path):
        (tmp_path / "model.py").write_text("import torch\n")
        assert any("torch" in b.name for b in find_slow_imports(str(tmp_path)))

    def test_skips_light_imports(self, tmp_path):
        # Cheap stdlib imports should never be flagged.
        (tmp_path / "utils.py").write_text("import json\nimport os\nimport sys\n")
        assert not find_slow_imports(str(tmp_path))
|
||
|
|
|
||
|
|
|
||
|
|
class TestGenerateReport:
    """Unit tests for end-to-end report generation."""

    def test_empty_repo(self, tmp_path):
        # Even an empty repo yields a well-formed summary.
        summary = generate_report(str(tmp_path)).summary
        assert summary["total_bottlenecks"] >= 0
        assert "critical" in summary
        assert "warning" in summary

    def test_with_findings(self, tmp_path):
        # A test module with both a sleep and a live HTTP call should
        # surface at least two findings.
        (tmp_path / "test_slow.py").write_text(textwrap.dedent('''
            import time
            import requests

            def test_sleepy():
                time.sleep(3)

            def test_http():
                requests.get("https://example.com")
        '''))
        report = generate_report(str(tmp_path))
        assert report.summary["total_bottlenecks"] >= 2
        assert report.bottlenecks

    def test_summary_categories(self, tmp_path):
        assert "by_category" in generate_report(str(tmp_path)).summary
|
||
|
|
|
||
|
|
|
||
|
|
class TestMarkdownReport:
    """Unit tests for format_markdown output."""

    def test_format(self):
        rpt = PerfReport(
            timestamp="2026-01-01T00:00:00Z",
            repo_path="/tmp/repo",
            bottlenecks=[Bottleneck("test", "slow_test", 5.0, "critical", "Fix it")],
            summary={
                "total_bottlenecks": 1,
                "critical": 1,
                "warning": 0,
                "info": 0,
                "estimated_total_slowdown_s": 5.0,
                "by_category": {"test": 1},
            },
        )
        md = format_markdown(rpt)
        # Title, finding name, severity marker, and recommendation all appear.
        for expected in ("# Performance Bottleneck Report", "slow_test", "🔴", "Fix it"):
            assert expected in md

    def test_empty_report(self):
        rpt = PerfReport(
            timestamp="2026-01-01T00:00:00Z",
            repo_path="/tmp/repo",
            summary={
                "total_bottlenecks": 0,
                "critical": 0,
                "warning": 0,
                "info": 0,
                "estimated_total_slowdown_s": 0,
                "by_category": {},
            },
        )
        assert "Total bottlenecks:** 0" in format_markdown(rpt)
|