Some checks failed
Architecture Lint / Linter Tests (push) Successful in 15s
Smoke Test / smoke (push) Failing after 14s
Validate Config / YAML Lint (push) Failing after 13s
Validate Config / JSON Validate (push) Successful in 13s
Validate Config / Shell Script Lint (push) Failing after 40s
Validate Config / Python Syntax & Import Check (push) Failing after 58s
Validate Config / Python Test Suite (push) Has been skipped
Validate Config / Cron Syntax Check (push) Successful in 11s
Validate Config / Deploy Script Dry Run (push) Successful in 11s
Validate Config / Playbook Schema Validation (push) Successful in 18s
Architecture Lint / Lint Repository (push) Failing after 13s
Architecture Lint / Linter Tests (pull_request) Successful in 26s
Smoke Test / smoke (pull_request) Failing after 17s
Validate Config / YAML Lint (pull_request) Failing after 12s
Validate Config / JSON Validate (pull_request) Successful in 12s
PR Checklist / pr-checklist (pull_request) Failing after 3m36s
Validate Config / Shell Script Lint (pull_request) Failing after 40s
Validate Config / Python Syntax & Import Check (pull_request) Failing after 1m4s
Validate Config / Python Test Suite (pull_request) Has been skipped
Validate Config / Cron Syntax Check (pull_request) Successful in 9s
Validate Config / Deploy Script Dry Run (pull_request) Successful in 9s
Validate Config / Playbook Schema Validation (pull_request) Successful in 20s
Architecture Lint / Lint Repository (pull_request) Failing after 16s
Merge PR #558
124 lines
4.0 KiB
Python
124 lines
4.0 KiB
Python
#!/usr/bin/env python3
|
|
"""Tests for nexus_smoke_test.py — verifies smoke test logic."""
|
|
|
|
import json
|
|
import sys
|
|
from pathlib import Path
|
|
|
|
sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))
|
|
|
|
from nexus_smoke_test import (
|
|
Severity, SmokeCheck, SmokeResult,
|
|
format_result, _parse_json_response,
|
|
)
|
|
|
|
|
|
def test_parse_json_clean():
    """A bare JSON object string parses into a dict with its fields intact."""
    payload = '{"status": "PASS", "summary": "ok"}'
    parsed = _parse_json_response(payload)
    assert parsed["status"] == "PASS"
    print(" PASS: test_parse_json_clean")
|
|
|
|
|
|
def test_parse_json_fenced():
    """JSON wrapped in a markdown ```json fence is still extracted and parsed."""
    fenced = '```json\n{"status": "FAIL"}\n```'
    parsed = _parse_json_response(fenced)
    assert parsed["status"] == "FAIL"
    print(" PASS: test_parse_json_fenced")
|
|
|
|
|
|
def test_parse_json_garbage():
    """Input containing no JSON yields an empty dict rather than raising."""
    parsed = _parse_json_response("no json here")
    assert parsed == {}
    print(" PASS: test_parse_json_garbage")
|
|
|
|
|
|
def test_smoke_check_dataclass():
    """SmokeCheck stores its constructor fields verbatim."""
    check = SmokeCheck(name="Test", status=Severity.PASS, message="All good")
    assert check.name == "Test"
    assert check.status == Severity.PASS
    print(" PASS: test_smoke_check_dataclass")
|
|
|
|
|
|
def test_smoke_result_dataclass():
    """SmokeResult keeps its URL and accepts checks appended after construction."""
    result = SmokeResult(url="https://example.com", status=Severity.PASS)
    result.checks.append(SmokeCheck(name="Page Loads", status=Severity.PASS))
    assert result.url == "https://example.com"
    assert len(result.checks) == 1
    print(" PASS: test_smoke_result_dataclass")
|
|
|
|
|
|
def test_format_json():
    """format_result(..., "json") emits valid JSON with the expected fields."""
    result = SmokeResult(url="https://test.com", status=Severity.PASS, summary="All good", duration_ms=100)
    result.checks.append(SmokeCheck(name="Test", status=Severity.PASS, message="OK"))
    rendered = format_result(result, "json")
    # Round-trip through the parser to prove the output is well-formed JSON.
    decoded = json.loads(rendered)
    assert decoded["status"] == "pass"
    assert decoded["url"] == "https://test.com"
    assert len(decoded["checks"]) == 1
    print(" PASS: test_format_json")
|
|
|
|
|
|
def test_format_text():
    """format_result(..., "text") includes the banner, URL, and WARN marker."""
    result = SmokeResult(url="https://test.com", status=Severity.WARN, summary="1 warning", duration_ms=200)
    result.checks.append(SmokeCheck(name="Screenshot", status=Severity.WARN, message="No backend"))
    rendered = format_result(result, "text")
    for expected in ("NEXUS VISUAL SMOKE TEST", "https://test.com", "WARN"):
        assert expected in rendered
    print(" PASS: test_format_text")
|
|
|
|
|
|
def test_format_text_pass():
    """Text formatting of an all-PASS result shows the pass emoji and check names.

    Bug fix: the success message previously printed "test_format_text"
    (copy-pasted from the preceding test), mislabeling this test's result
    in the runner output. It now reports its own name.
    """
    r = SmokeResult(url="https://test.com", status=Severity.PASS, summary="All clear")
    r.checks.append(SmokeCheck(name="Page Loads", status=Severity.PASS, message="HTTP 200"))
    r.checks.append(SmokeCheck(name="HTML Content", status=Severity.PASS, message="Valid"))
    output = format_result(r, "text")
    assert "✅" in output
    assert "Page Loads" in output
    print(" PASS: test_format_text_pass")
|
|
|
|
|
|
def test_severity_enum():
    """Severity members serialize to their lowercase string values."""
    expected = {Severity.PASS: "pass", Severity.FAIL: "fail", Severity.WARN: "warn"}
    for member, value in expected.items():
        assert member.value == value
    print(" PASS: test_severity_enum")
|
|
|
|
|
|
def test_overall_status_logic():
    """Counting FAIL/WARN checks distinguishes a clean run from a failing one."""
    result = SmokeResult()
    result.checks = [
        SmokeCheck(name="a", status=Severity.PASS),
        SmokeCheck(name="b", status=Severity.PASS),
    ]

    def count(severity):
        # Tally checks at the given severity, mirroring the overall-status logic.
        return sum(1 for c in result.checks if c.status == severity)

    # All-pass: neither failures nor warnings present.
    assert count(Severity.FAIL) == 0 and count(Severity.WARN) == 0

    # Introducing a single failing check is reflected in the tally.
    result.checks.append(SmokeCheck(name="c", status=Severity.FAIL))
    assert count(Severity.FAIL) == 1
    print(" PASS: test_overall_status_logic")
|
|
|
|
|
|
def run_all():
    """Run every test function, report failures, and return overall success.

    Returns:
        True when all tests passed, False otherwise.
    """
    print("=== nexus_smoke_test tests ===")
    tests = [
        test_parse_json_clean, test_parse_json_fenced, test_parse_json_garbage,
        test_smoke_check_dataclass, test_smoke_result_dataclass,
        test_format_json, test_format_text, test_format_text_pass,
        test_severity_enum, test_overall_status_logic,
    ]
    passed = 0
    failed = 0
    for test_fn in tests:
        try:
            test_fn()
        except Exception as e:
            # A failed assertion (or any error) counts against the suite
            # but does not stop the remaining tests from running.
            print(f" FAIL: {test_fn.__name__} — {e}")
            failed += 1
        else:
            passed += 1
    verdict = 'ALL PASSED' if failed == 0 else f'{failed} FAILED'
    print(f"\n{verdict}: {passed}/{len(tests)}")
    return failed == 0
|
|
|
|
|
|
if __name__ == "__main__":
    # Exit 0 only when the full suite passes, so CI can gate on this script.
    exit_code = 0 if run_all() else 1
    sys.exit(exit_code)
|