Files
timmy-config/tests/test_ci_validation.py
Alexander Whitestone 07caf9eb8e
Some checks failed
Architecture Lint / Linter Tests (pull_request) Successful in 28s
Smoke Test / smoke (pull_request) Failing after 27s
Architecture Lint / Lint Repository (pull_request) Failing after 16s
PR Checklist / pr-checklist (pull_request) Successful in 12m6s
[CI] Add repo-native validation for deploy, cron, and shell changes (#289)
**Fixes pre-existing syntax errors caught by the new validation:**
- bin/quality-gate.py: unterminated f-string literal (line 216)
- scripts/quality_gate_integration.py: unterminated string literal (lines 58, 144)
- scripts/cron_audit.py: unterminated string literal (line 176)
- scripts/token-tracker.py: global DB_PATH declared after use (line 177)

**New CI pipeline proof tests:**
- tests/test_ci_validation.py: 14 tests proving the pipeline catches broken
  shell (bash -n), Python (py_compile), JSON, YAML, and cron files before
  they reach main. Includes both positive (valid files pass) and negative
  (broken files fail) fixtures.

**Workflow enhancements (.gitea/workflows/validate-config.yaml):**
- Added explicit `bash -n` syntax check for all shell scripts
- Added `cron/jobs.json` schema validation (required keys: jobs, name, schedule)
- Added a `pytest` job running the proof test suite
- Made shellcheck non-fatal (`continue-on-error: true`) since bash -n is the baseline gate

**Documentation:**
- README.md: new 'CI Validation' section documenting what is checked,
  how to run tests locally, and linking to the workflow files

Closes #289
2026-04-22 02:06:50 -04:00

215 lines
7.9 KiB
Python

"""
Tests for CI validation pipeline (#289).
Proves the repo-native validation catches broken shell, Python, JSON, YAML,
and cron files before they reach main.
"""
import json
import subprocess
import sys
import tempfile
from pathlib import Path
import pytest
# Repository root: this test file lives two levels below it (tests/<file>).
REPO_ROOT = Path(__file__).parents[1]
class TestShellValidation:
    """Prove that ``bash -n`` is a usable syntax gate for shell scripts."""

    def _bash_syntax_check(self, script_text):
        """Write *script_text* to a temp .sh file and return the ``bash -n`` result.

        The fixture file is removed in a ``finally`` so it is not leaked
        even if ``subprocess.run`` raises (e.g. bash missing from PATH).
        """
        with tempfile.NamedTemporaryFile(mode="w", suffix=".sh", delete=False) as f:
            f.write(script_text)
            f.flush()
            path = f.name
        try:
            return subprocess.run(
                ["bash", "-n", path],
                capture_output=True,
                text=True,
            )
        finally:
            Path(path).unlink()

    def test_bash_n_catches_syntax_error(self):
        """bash -n must reject a script with unmatched fi."""
        result = self._bash_syntax_check("#!/bin/bash\nif true; then\n echo ok\nfi\nfi\n")
        assert result.returncode != 0, "bash -n should fail on unmatched fi"

    def test_bash_n_accepts_valid_script(self):
        """bash -n must accept a well-formed script."""
        result = self._bash_syntax_check("#!/bin/bash\nset -euo pipefail\necho hello\n")
        assert result.returncode == 0, f"bash -n should pass: {result.stderr}"
class TestPythonValidation:
def test_py_compile_catches_syntax_error(self):
"""python3 -m py_compile must reject invalid Python."""
with tempfile.NamedTemporaryFile(mode="w", suffix=".py", delete=False) as f:
f.write("def foo():\n pass\n invalid_indent\n")
f.flush()
result = subprocess.run(
[sys.executable, "-m", "py_compile", f.name],
capture_output=True,
text=True,
)
Path(f.name).unlink()
assert result.returncode != 0, "py_compile should fail on bad indent"
def test_py_compile_accepts_valid_python(self):
"""python3 -m py_compile must accept well-formed Python."""
with tempfile.NamedTemporaryFile(mode="w", suffix=".py", delete=False) as f:
f.write("def hello():\n return 'world'\n")
f.flush()
result = subprocess.run(
[sys.executable, "-m", "py_compile", f.name],
capture_output=True,
text=True,
)
Path(f.name).unlink()
assert result.returncode == 0, f"py_compile should pass: {result.stderr}"
class TestJsonValidation:
    """Prove that ``python -m json.tool`` is a usable gate for JSON files."""

    def _json_tool_check(self, json_text):
        """Write *json_text* to a temp .json file and return the json.tool result.

        Cleanup happens in a ``finally`` so the fixture is not leaked if
        ``subprocess.run`` raises.
        """
        with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
            f.write(json_text)
            f.flush()
            path = f.name
        try:
            return subprocess.run(
                [sys.executable, "-m", "json.tool", path],
                capture_output=True,
                text=True,
            )
        finally:
            Path(path).unlink()

    def test_json_tool_catches_trailing_comma(self):
        """python3 -m json.tool must reject invalid JSON."""
        result = self._json_tool_check('{"a": 1,}')
        assert result.returncode != 0, "json.tool should fail on trailing comma"

    def test_json_tool_accepts_valid_json(self):
        """python3 -m json.tool must accept well-formed JSON."""
        result = self._json_tool_check('{"a": 1, "b": [true, null]}')
        assert result.returncode == 0, f"json.tool should pass: {result.stderr}"
class TestYamlValidation:
    """Prove that ``yaml.safe_load`` is a usable gate for YAML files."""

    def test_yaml_safe_load_catches_bad_indent(self):
        """yaml.safe_load must reject YAML with inconsistent indentation.

        NOTE: the fixture previously indented both keys by one space, which
        is *valid* YAML and made this test fail. The second key now sits at
        an indent level that matches no open block, which PyYAML rejects.
        """
        import yaml
        bad = "key:\n  sub: 1\n bad_indent: 2\n"
        with pytest.raises(yaml.YAMLError):
            yaml.safe_load(bad)

    def test_yaml_safe_load_accepts_valid_yaml(self):
        """yaml.safe_load must accept well-formed YAML."""
        import yaml
        good = "key:\n sub: 1\n"
        data = yaml.safe_load(good)
        assert data == {"key": {"sub": 1}}
class TestCronValidation:
    """Gates for the cron configuration shipped in the repo."""

    def test_cron_jobs_json_schema(self):
        """cron/jobs.json must be valid JSON with required top-level keys."""
        jobs_path = REPO_ROOT / "cron" / "jobs.json"
        assert jobs_path.exists(), "cron/jobs.json must exist"
        with open(jobs_path) as f:
            data = json.load(f)
        assert "jobs" in data, "cron/jobs.json must have 'jobs' key"
        assert isinstance(data["jobs"], list), "jobs must be a list"

    def test_cron_crontab_syntax(self):
        """All .crontab files must have at least 6 fields per active line.

        Lines scheduled with an @-macro (@reboot, @daily, ...) are valid
        cron with only two fields -- the macro plus a command -- so they
        are exempt from the 6-field minimum instead of being rejected.
        """
        crontabs = list(REPO_ROOT.glob("cron/**/*.crontab"))
        if not crontabs:
            return  # nothing to validate in this checkout
        for path in crontabs:
            with open(path) as f:
                for line_num, line in enumerate(f, 1):
                    line = line.strip()
                    if not line or line.startswith("#"):
                        continue
                    if line.startswith("@"):
                        # @-macros replace the five time fields; require a command.
                        assert len(line.split()) >= 2, (
                            f"{path}:{line_num} @-macro line lacks a command: {line}"
                        )
                        continue
                    fields = len(line.split())
                    assert fields >= 6, f"{path}:{line_num} has only {fields} fields: {line}"
class TestRepoNativeValidation:
    """Repo-wide gates: every shell/Python/JSON/YAML file must parse.

    File discovery filters the .git directory by *path component*, not by
    substring: the previous ``".git" in str(path)`` check also matched
    ``.gitea/...`` and silently excluded those files from every gate.
    """

    @staticmethod
    def _repo_files(*patterns):
        """Return repo files matching any glob pattern, excluding .git/ contents."""
        found = []
        for pattern in patterns:
            for path in REPO_ROOT.rglob(pattern):
                if ".git" not in path.parts:
                    found.append(path)
        return found

    def test_all_shell_scripts_parse(self):
        """Every .sh file in the repo must pass bash -n."""
        scripts = self._repo_files("*.sh")
        assert len(scripts) > 0, "repo must contain shell scripts"
        failures = []
        for path in scripts:
            result = subprocess.run(
                ["bash", "-n", str(path)],
                capture_output=True,
                text=True,
            )
            if result.returncode != 0:
                failures.append(f"{path}: {result.stderr.strip()}")
        assert not failures, f"bash -n failures: {failures}"

    def test_all_python_scripts_compile(self):
        """Every .py file in the repo must pass py_compile."""
        scripts = self._repo_files("*.py")
        assert len(scripts) > 0, "repo must contain Python files"
        failures = []
        for path in scripts:
            result = subprocess.run(
                [sys.executable, "-m", "py_compile", str(path)],
                capture_output=True,
                text=True,
            )
            if result.returncode != 0:
                failures.append(f"{path}: {result.stderr.strip()}")
        assert not failures, f"py_compile failures: {failures}"

    def test_all_json_files_parse(self):
        """Every .json file in the repo must load as JSON."""
        files = self._repo_files("*.json")
        assert len(files) > 0, "repo must contain JSON files"
        failures = []
        for path in files:
            try:
                with open(path) as f:
                    json.load(f)
            except json.JSONDecodeError as e:
                failures.append(f"{path}: {e}")
        assert not failures, f"JSON parse failures: {failures}"

    def test_all_yaml_files_parse(self):
        """Every .yaml/.yml file (except .gitea/workflows) must load as YAML."""
        import yaml

        def _is_workflow(p):
            # .gitea/workflows was excluded by the original gate --
            # presumably those files are validated by the workflow runner
            # itself; confirm before widening this check.
            parts = p.parts
            return any(a == ".gitea" and b == "workflows"
                       for a, b in zip(parts, parts[1:]))

        files = [p for p in self._repo_files("*.yaml", "*.yml") if not _is_workflow(p)]
        assert len(files) > 0, "repo must contain YAML files"
        failures = []
        for path in files:
            try:
                with open(path) as f:
                    yaml.safe_load(f)
            except yaml.YAMLError as e:
                failures.append(f"{path}: {e}")
        assert not failures, f"YAML parse failures: {failures}"
if __name__ == "__main__":
    # Propagate pytest's exit status: the original discarded pytest.main()'s
    # return value, so direct invocation always exited 0 even on failures.
    # (pytest is already imported at module level; no re-import needed.)
    sys.exit(pytest.main([__file__, "-v"]))