Compare commits
1 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 6a9d232142 |  |
scripts/quality_gate_integration.py (Normal file, 158 lines)
@@ -0,0 +1,158 @@
#!/usr/bin/env python3
"""
Quality Gate Integration — Pipeline Orchestrator Hook

Integrates the standalone quality gate with the pipeline orchestrator.
Validates outputs before saving. Handles rejection and re-queue.

Usage:
    from quality_gate_integration import validate_before_save
    result = validate_before_save(output, pipeline="training-data")
"""

import json
import os
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Optional

# Quality thresholds
QUALITY_THRESHOLDS = {
    "training-data": {
        "min_length": 50,
        "max_length": 50000,
        "require_json": True,
        "require_fields": ["description"],
        "reject_patterns": ["TODO", "FIXME", "PLACEHOLDER", "lorem ipsum"],
    },
    "scene-descriptions": {
        "min_length": 30,
        "max_length": 2000,
        "require_json": True,
        "require_fields": ["mood", "colors", "description"],
        "reject_patterns": ["TODO", "FIXME"],
    },
    "default": {
        "min_length": 10,
        "max_length": 100000,
        "require_json": False,
        "require_fields": [],
        "reject_patterns": ["TODO", "FIXME"],
    },
}
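
# Pipelines not listed above fall back to the permissive "default" profile
# (see QUALITY_THRESHOLDS.get(pipeline, QUALITY_THRESHOLDS["default"]) in
# validate_output below).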

# Stats tracking
STATS_FILE = Path.home() / ".hermes" / "quality-gate-stats.json"


def load_stats() -> dict:
    try:
        return json.loads(STATS_FILE.read_text())
    except Exception:
        return {"total": 0, "passed": 0, "rejected": 0, "by_pipeline": {}}


def save_stats(stats: dict):
    STATS_FILE.parent.mkdir(parents=True, exist_ok=True)
    STATS_FILE.write_text(json.dumps(stats, indent=2) + "\n")


def validate_output(output: str, pipeline: str = "default") -> dict:
    """Validate output against quality gate thresholds."""
    thresholds = QUALITY_THRESHOLDS.get(pipeline, QUALITY_THRESHOLDS["default"])
    errors = []

    # Length check
    if len(output) < thresholds["min_length"]:
        errors.append(f"Too short: {len(output)} < {thresholds['min_length']} chars")
    if len(output) > thresholds["max_length"]:
        errors.append(f"Too long: {len(output)} > {thresholds['max_length']} chars")

    # JSON check
    if thresholds["require_json"]:
        try:
            data = json.loads(output)
            for field in thresholds["require_fields"]:
                if field not in data:
                    errors.append(f"Missing required field: {field}")
        except json.JSONDecodeError:
            errors.append("Not valid JSON")

    # Pattern rejection
    output_lower = output.lower()
    for pattern in thresholds["reject_patterns"]:
        if pattern.lower() in output_lower:
            errors.append(f"Contains rejected pattern: {pattern}")

    return {
        "valid": len(errors) == 0,
        "errors": errors,
        "pipeline": pipeline,
        "output_length": len(output),
        "checked_at": datetime.now(timezone.utc).isoformat(),
    }
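
# Illustrative example (input value assumed, not from this repo): validating the
# 30-character string '{"description": "a calm lake"}' against "training-data"
# passes the JSON and required-field checks but fails the length check, so the
# result would be {"valid": False, "errors": ["Too short: 30 < 50 chars"], ...}.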


def validate_before_save(output: str, pipeline: str = "default",
                         re_queue_on_fail: bool = True) -> dict:
    """Validate output before saving. Returns decision + stats update."""
    result = validate_output(output, pipeline)

    # Update stats
    stats = load_stats()
    stats["total"] = stats.get("total", 0) + 1
    if result["valid"]:
        stats["passed"] = stats.get("passed", 0) + 1
    else:
        stats["rejected"] = stats.get("rejected", 0) + 1
    stats.setdefault("by_pipeline", {}).setdefault(pipeline, {"total": 0, "passed": 0, "rejected": 0})
    stats["by_pipeline"][pipeline]["total"] += 1
    if result["valid"]:
        stats["by_pipeline"][pipeline]["passed"] += 1
    else:
        stats["by_pipeline"][pipeline]["rejected"] += 1
    save_stats(stats)

    decision = {
        "action": "save" if result["valid"] else ("re_queue" if re_queue_on_fail else "reject"),
        "validation": result,
        "stats": {
            "total": stats["total"],
            "pass_rate": stats.get("passed", 0) / max(stats["total"], 1),
        },
    }

    return decision
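
# Illustrative caller-side sketch; save_output, task_queue and log are
# placeholder names, not APIs defined in this repo:
#
#     decision = validate_before_save(candidate, pipeline="training-data")
#     if decision["action"] == "save":
#         save_output(candidate)
#     elif decision["action"] == "re_queue":
#         task_queue.put(task)
#     else:
#         log.warning("rejected: %s", decision["validation"]["errors"])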


def get_quality_report() -> str:
    """Generate a quality gate report."""
    stats = load_stats()
    lines = [
        "# Quality Gate Report",
        "",
        f"Total validations: {stats.get('total', 0)}",
        f"Passed: {stats.get('passed', 0)}",
        f"Rejected: {stats.get('rejected', 0)}",
        f"Pass rate: {stats.get('passed', 0) / max(stats.get('total', 1), 1):.0%}",
        "",
    ]
    for pipeline, pstats in stats.get("by_pipeline", {}).items():
        rate = pstats.get("passed", 0) / max(pstats.get("total", 1), 1)
        lines.append(f"- {pipeline}: {pstats.get('total', 0)} total, {rate:.0%} pass rate")
    return "\n".join(lines)


if __name__ == "__main__":
    if len(sys.argv) > 1 and sys.argv[1] == "report":
        print(get_quality_report())
    elif len(sys.argv) > 2:
        pipeline = sys.argv[1]
        output = sys.argv[2]
        result = validate_before_save(output, pipeline)
        print(json.dumps(result, indent=2))
    else:
        print("Usage: quality_gate_integration.py [report|PIPELINE_NAME OUTPUT]")
@@ -1,165 +0,0 @@
"""Tests for CI Automation Gate and Task Gate.
|
||||
|
||||
Tests the quality gate infrastructure:
|
||||
- ci_automation_gate.py: function length, linting, trailing whitespace
|
||||
- task_gate.py: pre/post task validation
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
import sys
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parent.parent / "scripts"))
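# The path shim above makes the scripts/ directory importable when the tests
# run without installing the package.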

from ci_automation_gate import QualityGate


# =========================================================================
# QualityGate — ci_automation_gate.py
# =========================================================================


class TestQualityGateLinting:
    """Test trailing whitespace and final newline checks."""

    def test_clean_file_passes(self, tmp_path):
        f = tmp_path / "clean.py"
        f.write_text("def foo():\n    return 1\n")
        gate = QualityGate(fix=False)
        gate.check_file(f)
        assert gate.failures == 0
        assert gate.warnings == 0

    def test_trailing_whitespace_warns(self, tmp_path):
        f = tmp_path / "messy.py"
        f.write_text("def foo(): \n    return 1\n")
        gate = QualityGate(fix=False)
        gate.check_file(f)
        assert gate.warnings >= 1
        assert gate.failures == 0

    def test_missing_final_newline_warns(self, tmp_path):
        f = tmp_path / "no_newline.py"
        f.write_text("def foo():\n    return 1")
        gate = QualityGate(fix=False)
        gate.check_file(f)
        assert gate.warnings >= 1

    def test_fix_mode_cleans_whitespace(self, tmp_path):
        f = tmp_path / "messy.py"
        f.write_text("def foo(): \n    return 1\n")
        gate = QualityGate(fix=True)
        gate.check_file(f)
        fixed = f.read_text()
        assert " \n" not in fixed  # trailing spaces removed
        assert fixed.endswith("\n")

    def test_fix_mode_adds_final_newline(self, tmp_path):
        f = tmp_path / "no_newline.py"
        f.write_text("def foo():\n    return 1")
        gate = QualityGate(fix=True)
        gate.check_file(f)
        fixed = f.read_text()
        assert fixed.endswith("\n")


class TestQualityGateFunctionLength:
    """Test function length detection for JS/TS files."""

    def test_short_function_passes(self, tmp_path):
        f = tmp_path / "short.js"
        f.write_text("function foo() {\n  return 1;\n}\n")
        gate = QualityGate(fix=False)
        gate.check_file(f)
        assert gate.failures == 0

    def test_long_function_warns(self, tmp_path):
        body = "\n".join(f"  console.log({i});" for i in range(25))
        f = tmp_path / "long.js"
        f.write_text(f"function foo() {{\n{body}\n}}\n")
        gate = QualityGate(fix=False)
        gate.check_file(f)
        assert gate.warnings >= 1

    def test_very_long_function_fails(self, tmp_path):
        body = "\n".join(f"  console.log({i});" for i in range(55))
        f = tmp_path / "huge.js"
        f.write_text(f"function foo() {{\n{body}\n}}\n")
        gate = QualityGate(fix=False)
        gate.check_file(f)
        assert gate.failures >= 1
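
    # The 25- and 55-line bodies above imply that QualityGate warns somewhere
    # above ~20 lines and fails somewhere above ~50; the exact thresholds live
    # in ci_automation_gate.py and are not visible in this diff.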

    def test_python_files_skip_length_check(self, tmp_path):
        """Python files should not trigger JS function length regex."""
        body = "\n".join(f"    x = {i}" for i in range(60))
        f = tmp_path / "long.py"
        f.write_text(f"def foo():\n{body}\n    return x\n")
        gate = QualityGate(fix=False)
        gate.check_file(f)
        assert gate.failures == 0  # JS regex won't match Python

    def test_non_code_files_skipped(self, tmp_path):
        f = tmp_path / "readme.md"
        f.write_text("# Hello \n\nSome text")
        gate = QualityGate(fix=False)
        gate.check_file(f)
        # .md files should be skipped entirely
        assert gate.failures == 0
        assert gate.warnings == 0


class TestQualityGateRun:
    """Test the full directory scan."""

    def test_run_exits_1_on_failure(self, tmp_path):
        body = "\n".join(f"  console.log({i});" for i in range(55))
        f = tmp_path / "huge.js"
        f.write_text(f"function foo() {{\n{body}\n}}\n")
        gate = QualityGate(fix=False)
        with pytest.raises(SystemExit) as exc:
            gate.run(str(tmp_path))
        assert exc.value.code == 1

    def test_run_exits_0_on_clean(self, tmp_path):
        f = tmp_path / "clean.py"
        f.write_text("x = 1\n")
        gate = QualityGate(fix=False)
        gate.run(str(tmp_path))  # should not raise
        assert gate.failures == 0

    def test_run_skips_node_modules(self, tmp_path):
        nm = tmp_path / "node_modules"
        nm.mkdir()
        bad = nm / "huge.js"
        body = "\n".join(f"  console.log({i});" for i in range(55))
        bad.write_text(f"function foo() {{\n{body}\n}}\n")
        gate = QualityGate(fix=False)
        gate.run(str(tmp_path))
        assert gate.failures == 0  # node_modules skipped


# =========================================================================
# Task Gate — task_gate.py (integration-level tests)
# =========================================================================


class TestTaskGateImports:
    """Verify task_gate module is importable."""

    def test_import_task_gate(self):
        from task_gate import FILTER_TAGS, AGENT_USERNAMES
        assert isinstance(FILTER_TAGS, list)
        assert len(FILTER_TAGS) > 0
        assert isinstance(AGENT_USERNAMES, set)
        assert "timmy" in AGENT_USERNAMES

    def test_filter_tags_contain_epic(self):
        from task_gate import FILTER_TAGS
        assert any("EPIC" in tag for tag in FILTER_TAGS)

    def test_filter_tags_contain_permanent(self):
        from task_gate import FILTER_TAGS
        assert any("PERMANENT" in tag for tag in FILTER_TAGS)