Some checks failed
Architecture Lint / Lint Repository (push) Has been cancelled
Architecture Lint / Linter Tests (push) Has been cancelled
Smoke Test / smoke (push) Has been cancelled
Validate Config / JSON Validate (push) Has been cancelled
Validate Config / Python Syntax & Import Check (push) Has been cancelled
Validate Config / Python Test Suite (push) Has been cancelled
Validate Config / Shell Script Lint (push) Has been cancelled
Validate Config / Cron Syntax Check (push) Has been cancelled
Validate Config / Deploy Script Dry Run (push) Has been cancelled
Validate Config / Playbook Schema Validation (push) Has been cancelled
Validate Config / YAML Lint (push) Has been cancelled
Merge PR #567
149 lines
5.0 KiB
Python
#!/usr/bin/env python3
|
|
"""Tests for matrix_glitch_detect.py — verifies detection and HTML report logic."""
|
|
|
|
import json
|
|
import sys
|
|
import tempfile
|
|
from pathlib import Path
|
|
|
|
sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))
|
|
|
|
from matrix_glitch_detect import (
|
|
Severity, Glitch, GlitchReport,
|
|
format_report, generate_html_report, _parse_json_response,
|
|
)
|
|
|
|
|
|
def test_parse_json_clean():
    """A plain JSON string parses and exposes the quality score."""
    parsed = _parse_json_response('{"glitches": [], "overall_quality": 95}')
    assert parsed["overall_quality"] == 95
    print(" PASS: test_parse_json_clean")
|
|
|
|
|
|
def test_parse_json_fenced():
    """JSON wrapped in a markdown code fence is still parsed correctly."""
    parsed = _parse_json_response('```json\n{"overall_quality": 80}\n```')
    assert parsed["overall_quality"] == 80
    print(" PASS: test_parse_json_fenced")
|
|
|
|
|
|
def test_parse_json_garbage():
    """Non-JSON input falls back to an empty dict rather than raising."""
    result = _parse_json_response("no json")
    assert result == {}
    print(" PASS: test_parse_json_garbage")
|
|
|
|
|
|
def test_glitch_dataclass():
    """Glitch stores constructor arguments verbatim on its fields."""
    glitch = Glitch(
        type="z_fighting",
        severity=Severity.MAJOR,
        region="center",
        description="Shimmer",
        confidence=0.8,
    )
    assert glitch.type == "z_fighting"
    assert glitch.confidence == 0.8
    print(" PASS: test_glitch_dataclass")
|
|
|
|
|
|
def test_report_dataclass():
    """GlitchReport keeps its score and accumulates appended glitches."""
    report = GlitchReport(source="test.png", status="WARN", score=75)
    report.glitches.append(Glitch(type="float", severity=Severity.MINOR))
    assert len(report.glitches) == 1
    assert report.score == 75
    print(" PASS: test_report_dataclass")
|
|
|
|
|
|
def test_format_json():
    """JSON output round-trips: status and glitch list survive serialization."""
    report = GlitchReport(source="test.png", status="PASS", score=90, summary="Clean")
    report.glitches.append(
        Glitch(type="cosmetic", severity=Severity.COSMETIC, description="Minor")
    )
    decoded = json.loads(format_report(report, "json"))
    assert decoded["status"] == "PASS"
    assert len(decoded["glitches"]) == 1
    print(" PASS: test_format_json")
|
|
|
|
|
|
def test_format_text():
    """Text output includes both the verdict and the glitch type."""
    report = GlitchReport(source="test.png", status="FAIL", score=30, summary="Critical glitch")
    report.glitches.append(
        Glitch(type="render_failure", severity=Severity.CRITICAL, description="Black screen")
    )
    rendered = format_report(report, "text")
    for needle in ("FAIL", "render_failure"):
        assert needle in rendered
    print(" PASS: test_format_text")
|
|
|
|
|
|
def test_html_report_basic():
    """A minimal passing report renders doctype, title, status, and score."""
    report = GlitchReport(source="test.png", status="PASS", score=100)
    html = generate_html_report([report], title="Test Report")
    for needle in ("<!DOCTYPE html>", "Test Report", "PASS", "100"):
        assert needle in html
    print(" PASS: test_html_report_basic")
|
|
|
|
|
|
def test_html_report_with_glitches():
    """Each glitch's type, severity, and description appear in the HTML."""
    report = GlitchReport(source="test.png", status="FAIL", score=40)
    report.glitches.append(
        Glitch(
            type="z_fighting",
            severity=Severity.CRITICAL,
            region="center",
            description="Heavy flicker",
            confidence=0.9,
        )
    )
    report.glitches.append(
        Glitch(
            type="clipping",
            severity=Severity.MINOR,
            region="bottom",
            description="Object through floor",
            confidence=0.6,
        )
    )
    html = generate_html_report([report], title="Glitch Report")
    for needle in ("z_fighting", "CRITICAL", "clipping", "Heavy flicker"):
        assert needle in html
    print(" PASS: test_html_report_with_glitches")
|
|
|
|
|
|
def test_html_report_multi():
    """Multiple reports render into one document with a screenshot count."""
    clean = GlitchReport(source="a.png", status="PASS", score=95)
    flagged = GlitchReport(source="b.png", status="WARN", score=70)
    flagged.glitches.append(Glitch(type="texture_pop", severity=Severity.MAJOR))
    html = generate_html_report([clean, flagged])
    assert "a.png" in html
    assert "b.png" in html
    assert "2" in html  # 2 screenshots
    print(" PASS: test_html_report_multi")
|
|
|
|
|
|
def test_html_self_contained():
    """The HTML report inlines its CSS and references no external assets."""
    report = GlitchReport(source="test.png", status="PASS", score=100)
    html = generate_html_report([report])
    lowered = html.lower()
    # Any mention of "external" must be the "no external dependencies" note.
    if "external" in lowered:
        assert "no external dependencies" in lowered
    assert "<style>" in html  # Inline CSS
    print(" PASS: test_html_self_contained")
|
|
|
|
|
|
def test_missing_image():
    """A report for a nonexistent file can be marked FAIL (simulated)."""
    report = GlitchReport(source="/nonexistent/image.png")
    # detect_glitches would set FAIL — simulate
    report.status = "FAIL"
    report.score = 0
    report.summary = "File not found"
    assert report.status == "FAIL"
    print(" PASS: test_missing_image")
|
|
|
|
|
|
def test_severity_enum():
    """Severity members carry their lowercase string values."""
    expectations = {Severity.CRITICAL: "critical", Severity.MAJOR: "major"}
    for member, value in expectations.items():
        assert member.value == value
    print(" PASS: test_severity_enum")
|
|
|
|
|
|
def run_all():
    """Run every test, print per-test results, and return True iff all passed."""
    print("=== matrix_glitch_detect tests ===")
    tests = (
        test_parse_json_clean,
        test_parse_json_fenced,
        test_parse_json_garbage,
        test_glitch_dataclass,
        test_report_dataclass,
        test_format_json,
        test_format_text,
        test_html_report_basic,
        test_html_report_with_glitches,
        test_html_report_multi,
        test_html_self_contained,
        test_missing_image,
        test_severity_enum,
    )
    passed = 0
    failed = 0
    for test in tests:
        try:
            test()
        except Exception as exc:
            # A failing assertion (or any error) counts as one test failure.
            print(f" FAIL: {test.__name__} — {exc}")
            failed += 1
        else:
            passed += 1
    verdict = "ALL PASSED" if failed == 0 else f"{failed} FAILED"
    print(f"\n{verdict}: {passed}/{len(tests)}")
    return failed == 0
|
|
|
|
|
|
if __name__ == "__main__":
    # Exit 0 on full success, 1 if any test failed.
    raise SystemExit(0 if run_all() else 1)
|